requirements.txt: Unpin numpy #2287

Merged: 4 commits, Aug 6, 2020

34 changes: 19 additions & 15 deletions graphs/karger.py
@@ -5,20 +5,19 @@
 import random
 from typing import Dict, List, Set, Tuple


 # Adjacency list representation of this graph:
 # https://en.wikipedia.org/wiki/File:Single_run_of_Karger%E2%80%99s_Mincut_algorithm.svg
 TEST_GRAPH = {
-    '1': ['2', '3', '4', '5'],
-    '2': ['1', '3', '4', '5'],
-    '3': ['1', '2', '4', '5', '10'],
-    '4': ['1', '2', '3', '5', '6'],
-    '5': ['1', '2', '3', '4', '7'],
-    '6': ['7', '8', '9', '10', '4'],
-    '7': ['6', '8', '9', '10', '5'],
-    '8': ['6', '7', '9', '10'],
-    '9': ['6', '7', '8', '10'],
-    '10': ['6', '7', '8', '9', '3']
+    "1": ["2", "3", "4", "5"],
+    "2": ["1", "3", "4", "5"],
+    "3": ["1", "2", "4", "5", "10"],
+    "4": ["1", "2", "3", "5", "6"],
+    "5": ["1", "2", "3", "4", "7"],
+    "6": ["7", "8", "9", "10", "4"],
+    "7": ["6", "8", "9", "10", "5"],
+    "8": ["6", "7", "9", "10"],
+    "9": ["6", "7", "8", "10"],
+    "10": ["6", "7", "8", "9", "3"],
 }


@@ -61,8 +60,9 @@ def partition_graph(graph: Dict[str, List[str]]) -> Set[Tuple[str, str]]:
     for neighbor in uv_neighbors:
         graph_copy[neighbor].append(uv)

-    contracted_nodes[uv] = {contracted_node for contracted_node in
-                            contracted_nodes[u].union(contracted_nodes[v])}
+    contracted_nodes[uv] = {
+        node for node in contracted_nodes[u].union(contracted_nodes[v])
+    }

     # Remove nodes u and v.
     del graph_copy[u]
@@ -75,8 +75,12 @@ def partition_graph(graph: Dict[str, List[str]]) -> Set[Tuple[str, str]]:

     # Find cutset.
     groups = [contracted_nodes[node] for node in graph_copy]
-    return {(node, neighbor) for node in groups[0]
-            for neighbor in graph[node] if neighbor in groups[1]}
+    return {
+        (node, neighbor)
+        for node in groups[0]
+        for neighbor in graph[node]
+        if neighbor in groups[1]
+    }


 if __name__ == "__main__":
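Since Karger's algorithm is randomized, a single call to partition_graph can return a non-minimal cut; the reformatted comprehensions above change only layout, not behaviour. For reference, a minimal driver sketch; find_min_cut and the trial count are illustrative assumptions, not part of this PR, and partition_graph and TEST_GRAPH are the names from the file above:

def find_min_cut(graph, trials=100):
    # Run the randomized contraction repeatedly and keep the smallest
    # cutset observed; more trials raise the chance of hitting the
    # true minimum cut.
    best = None
    for _ in range(trials):
        cutset = partition_graph(graph)
        if best is None or len(cutset) < len(best):
            best = cutset
    return best

print(find_min_cut(TEST_GRAPH))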
10 changes: 5 additions & 5 deletions other/scoring_algorithm.py
@@ -1,4 +1,4 @@
-'''
+"""
 developed by: markmelnic
 original repo: https://github.com/markmelnic/Scoring-Algorithm

@@ -23,17 +23,17 @@

 >>> procentual_proximity([[20, 60, 2012],[23, 90, 2015],[22, 50, 2011]], [0, 0, 1])
 [[20, 60, 2012, 2.0], [23, 90, 2015, 1.0], [22, 50, 2011, 1.3333333333333335]]
-'''
+"""


-def procentual_proximity(source_data : list, weights : list) -> list:
+def procentual_proximity(source_data: list, weights: list) -> list:

-    '''
+    """
     weights - int list
     possible values - 0 / 1
     0 if lower values have higher weight in the data set
     1 if higher values have higher weight in the data set
-    '''
+    """

     # getting data
     data_lists = []
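The doctest above already fixes the expected behaviour; as a quick standalone check of the reformatted signature, with the data taken verbatim from the docstring:

rows = [[20, 60, 2012], [23, 90, 2015], [22, 50, 2011]]
# Per the docstring: weight 0 favours lower values, weight 1 favours higher.
scored = procentual_proximity(rows, [0, 0, 1])
print(scored)
# [[20, 60, 2012, 2.0], [23, 90, 2015, 1.0], [22, 50, 2011, 1.3333333333333335]]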
2 changes: 1 addition & 1 deletion requirements.txt
@@ -5,7 +5,7 @@ flake8
 keras
 matplotlib
 mypy
-numpy>=1.17.4
+numpy
 opencv-python
 pandas
 pillow
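With the >=1.17.4 pin dropped, pip is free to resolve any numpy release. A quick way to confirm which version was actually installed (numpy.__version__ is the standard version attribute):

import numpy
print(numpy.__version__)  # whichever release pip resolved now that the pin is gone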
8 changes: 4 additions & 4 deletions web_programming/world_covid19_stats.py
@@ -1,9 +1,9 @@
 #!/usr/bin/env python3

-'''
+"""
 Provide the current worldwide COVID-19 statistics.
 This data is being scrapped from 'https://www.worldometers.info/coronavirus/'.
-'''
+"""

 import requests
 from bs4 import BeautifulSoup
@@ -13,8 +13,8 @@ def world_covid19_stats(url: str = "https://www.worldometers.info/coronavirus")
"""
Return a dict of current worldwide COVID-19 statistics
"""
soup = BeautifulSoup(requests.get(url).text, 'html.parser')
keys = soup.findAll('h1')
soup = BeautifulSoup(requests.get(url).text, "html.parser")
keys = soup.findAll("h1")
values = soup.findAll("div", {"class": "maincounter-number"})
keys += soup.findAll("span", {"class": "panel-title"})
values += soup.findAll("div", {"class": "number-table-main"})
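For context, a minimal sketch of how the reformatted function might be exercised; the printing loop is an illustrative assumption, not code from this PR:

stats = world_covid19_stats()
for key, value in stats.items():
    # Each key is a heading scraped from the page; each value is the
    # matching counter, per the function's "Return a dict" docstring.
    print(f"{key}: {value}")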