Skip to content

Commit 59b9ec2

Browse files
committed
Merge branch 'master' into arduifine
2 parents 82baaf6 + 4708cde commit 59b9ec2

File tree

465 files changed

+71550
-97552
lines changed

Some content is hidden

Large commits have some of their content hidden by default. Use the search box below to find content that may be hidden.

465 files changed

+71550
-97552
lines changed

Diff for: .dependabot/config.yml

-10
This file was deleted.

Diff for: .editorconfig

+3-3
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,6 @@ indent_size = 4
1919
indent_style = space
2020
indent_size = 2
2121

22-
[*.sh]
23-
indent_style = tab
24-
indent_size = 4
22+
[*.{bash,sh}]
23+
indent_size = 2
24+
indent_style = space

Diff for: .flake8

+11-4
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,16 @@
1-
# TODO: move this to pyproject.toml when supported: https://gitlab.com/pycqa/flake8/merge_requests/245
1+
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/check-python/.flake8
2+
# See: https://flake8.pycqa.org/en/latest/user/configuration.html
3+
# The code style defined in this file is the official standardized style to be used in all Arduino tooling projects and
4+
# should not be modified.
25

36
[flake8]
4-
max-line-length = 120
7+
doctests = True
8+
per-file-ignores =
9+
test/test_upload_mock.py:E501
510
ignore =
611
E741,
7-
# W503 and W504 are mutually exclusive, so one or the other must be ignored.
8-
# PEP 8 recommends line break before, so we keep W504.
12+
# W503 and W504 are mutually exclusive. PEP 8 recommends line break before.
913
W503
14+
max-complexity = 10
15+
max-line-length = 120
16+
select = E,W,F,C,N

Diff for: .github/dependabot.yml

+13
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
# See: https://docs.github.com/en/code-security/supply-chain-security/configuration-options-for-dependency-updates#about-the-dependabotyml-file
2+
version: 2
3+
4+
updates:
5+
# Configure check for outdated GitHub Actions actions in workflows.
6+
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/dependabot/README.md
7+
# See: https://docs.github.com/en/code-security/supply-chain-security/keeping-your-actions-up-to-date-with-dependabot
8+
- package-ecosystem: github-actions
9+
directory: / # Check the repository's workflows under /.github/workflows/
10+
schedule:
11+
interval: daily
12+
labels:
13+
- "topic: infrastructure"

Diff for: .github/label-configuration-files/labels.yml

+21
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# Used by the "Sync Labels" workflow
2+
# See: https://github.com/Financial-Times/github-label-sync#label-config-file
3+
4+
- name: "architecture: arm64"
5+
color: ff00ff
6+
description: Specific to 64 bit ARM host architecture
7+
- name: "architecture: armv6"
8+
color: ff00ff
9+
description: Specific to hosts using an ARMv6 core
10+
- name: "architecture: armv7"
11+
color: ff00ff
12+
description: Specific to hosts using an ARMv7 core
13+
- name: "topic: CLI"
14+
color: "00ffff"
15+
description: Related to the command line interface
16+
- name: "topic: gRPC"
17+
color: "00ffff"
18+
description: Related to the gRPC interface
19+
- name: "topic: packaging"
20+
color: "00ffff"
21+
description: Related to the release distribution package

Diff for: .github/tools/fetch_athena_stats.py

+135
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,135 @@
1+
import boto3
2+
import semver
3+
import os
4+
import logging
5+
import uuid
6+
import time
7+
8+
9+
# logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
10+
log = logging.getLogger()
11+
logging.getLogger("boto3").setLevel(logging.CRITICAL)
12+
logging.getLogger("botocore").setLevel(logging.CRITICAL)
13+
logging.getLogger("urllib3").setLevel(logging.CRITICAL)
14+
15+
16+
def execute(client, statement, dest_s3_output_location):
    """Start an Athena query and block until it reaches a terminal state.

    :param client: boto3 Athena client used to submit the query.
    :param statement: SQL statement to execute.
    :param dest_s3_output_location: S3 URI where Athena writes the query results.
    :return: the Athena query execution id, usable with get_query_results.
    :raises: an exception if the query fails or is cancelled (propagated from
        wait_for_query_execution_completion).
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    log.info("execute query: %s dumping in %s", statement, dest_s3_output_location)
    result = client.start_query_execution(
        QueryString=statement,
        # Unique idempotency token so a retried request can't start a second query.
        ClientRequestToken=str(uuid.uuid4()),
        ResultConfiguration={
            "OutputLocation": dest_s3_output_location,
        },
    )
    execution_id = result["QueryExecutionId"]
    log.info("wait for query %s completion", execution_id)
    wait_for_query_execution_completion(client, execution_id)
    log.info("operation successful")
    return execution_id
30+
31+
32+
def wait_for_query_execution_completion(client, query_execution_id):
    """Poll Athena once per second until the query reaches a terminal state.

    :param client: boto3 Athena client.
    :param query_execution_id: id returned by start_query_execution.
    :raises RuntimeError: if the query ends in FAILED or CANCELLED state.
    """
    while True:
        query_execution = client.get_query_execution(QueryExecutionId=query_execution_id)
        state = query_execution["QueryExecution"]["Status"]["State"]
        if state == "SUCCEEDED":
            return
        if state in ["FAILED", "CANCELLED"]:
            # RuntimeError (not BaseException): BaseException escapes ordinary
            # `except Exception` handlers and is reserved for interpreter-level
            # events like KeyboardInterrupt/SystemExit.
            raise RuntimeError(
                "query failed or canceled: {}".format(query_execution["QueryExecution"]["Status"]["StateChangeReason"])
            )
        else:
            time.sleep(1)
45+
46+
47+
def valid(key):
    """Return True when *key* begins with a parseable semantic version.

    Keys look like "0.18.0_macOS_64bit.tar.gz"; everything before the first
    underscore must be a valid semver string for the key to be accepted.
    """
    # str.split always yields at least one element, so no length check is needed
    # (the original `len(split) < 1` branch was unreachable).
    version = key.split("_")[0]
    try:
        semver.parse(version)
    except ValueError:
        return False
    return True
56+
57+
58+
def get_results(client, execution_id):
    """Read back a completed Athena query as a download-key -> count mapping.

    Rows whose key does not start with a valid semver version are dropped.
    """
    paginator = client.get_paginator("get_query_results")
    pages = paginator.paginate(QueryExecutionId=execution_id, PaginationConfig={"PageSize": 1000})
    downloads = {}
    for page in pages:
        # Skip the first row of each page (column headers).
        for row in page["ResultSet"]["Rows"][1:]:
            flavor = row["Data"][0]["VarCharValue"]
            if not valid(flavor):
                continue
            downloads[flavor] = row["Data"][1]["VarCharValue"]
    return downloads
70+
71+
72+
def convert_data(data):
    """Convert raw download counts into Datadog gauge metric dicts.

    :param data: mapping of download key ("<version>_<os>_<arch>.<ext>") to count.
    :return: list of metric dicts ready to be sent to Datadog; keys that do not
        split into exactly version/os/arch are silently skipped.
    """
    # Loop-invariant: compute the repository name once instead of per key.
    # GITHUB_REPOSITORY is "<owner>/<repo>"; only the repo part goes in the tag.
    repo = os.environ["GITHUB_REPOSITORY"].split("/")[1]
    result = []
    for key, value in data.items():
        # 0.18.0_macOS_64bit.tar.gz
        split_key = key.split("_")
        if len(split_key) != 3:
            continue
        (version, os_version, arch) = split_key
        # Drop the archive extension(s): "64bit.tar.gz" -> "64bit".
        # str.split always returns at least one element, so no length check is needed.
        arch = arch.split(".")[0]
        if len(arch) > 10:
            # This can't be an architecture really.
            # It's an ugly solution but works for now so deal with it.
            continue
        result.append(
            {
                "type": "gauge",
                "name": "arduino.downloads.total",
                "value": value,
                "host": os.environ["GITHUB_REPOSITORY"],
                "tags": [
                    f"version:{version}",
                    f"os:{os_version}",
                    f"arch:{arch}",
                    "cdn:downloads.arduino.cc",
                    f"project:{repo}",
                ],
            }
        )

    return result
106+
107+
108+
if __name__ == "__main__":
    # Required environment variables (provided by the GitHub Actions workflow).
    DEST_S3_OUTPUT = os.environ["AWS_ATHENA_OUTPUT_LOCATION"]
    AWS_ATHENA_SOURCE_TABLE = os.environ["AWS_ATHENA_SOURCE_TABLE"]

    # NOTE(review): region is hard-coded to us-east-1 — confirm this matches
    # where the Athena source table actually lives.
    session = boto3.session.Session(region_name="us-east-1")
    athena_client = session.client("athena")

    # Load all partitions before querying downloads
    execute(athena_client, f"MSCK REPAIR TABLE {AWS_ATHENA_SOURCE_TABLE};", DEST_S3_OUTPUT)

    # Count downloads per "flavor" (the version_os_arch file-name suffix) from
    # the CDN access logs, excluding the "latest" redirect, alpha releases and
    # the legacy .tar.bz2 archive format.
    query = f"""SELECT replace(json_extract_scalar(url_decode(url_decode(querystring)),
    '$.data.url'), 'https://downloads.arduino.cc/arduino-cli/arduino-cli_', '')
    AS flavor, count(json_extract(url_decode(url_decode(querystring)),'$')) AS gauge
    FROM {AWS_ATHENA_SOURCE_TABLE}
    WHERE json_extract_scalar(url_decode(url_decode(querystring)),'$.data.url')
    LIKE 'https://downloads.arduino.cc/arduino-cli/arduino-cli_%'
    AND json_extract_scalar(url_decode(url_decode(querystring)),'$.data.url')
    NOT LIKE '%latest%' -- exclude latest redirect
    AND json_extract_scalar(url_decode(url_decode(querystring)),'$.data.url')
    NOT LIKE '%alpha%' -- exclude early alpha releases
    AND json_extract_scalar(url_decode(url_decode(querystring)),'$.data.url')
    NOT LIKE '%.tar.bz2%' -- exclude very old releases archive formats
    group by 1 ;"""
    exec_id = execute(athena_client, query, DEST_S3_OUTPUT)
    results = get_results(athena_client, exec_id)
    result_json = convert_data(results)

    # Expose the metric list as a step output for the next workflow step.
    # NOTE(review): the ::set-output workflow command is deprecated by GitHub
    # Actions in favor of writing to $GITHUB_OUTPUT — consider migrating.
    print(f"::set-output name=result::{result_json}")

Diff for: .github/tools/fetch_athena_stats.sh

-93
This file was deleted.

Diff for: .github/workflows/arduino-stats.yaml

+10-7
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,10 @@ name: arduino-stats
22

33
on:
44
schedule:
5-
# run every day at 12:30:00
6-
- cron: "30 12 * * *"
5+
# run every day at 07:00 AM, 03:00 PM and 11:00 PM
6+
- cron: "0 7,15,23 * * *"
7+
workflow_dispatch:
8+
repository_dispatch:
79

810
jobs:
911
push-stats:
@@ -16,6 +18,10 @@ jobs:
1618
- name: Checkout
1719
uses: actions/checkout@v2
1820

21+
- uses: actions/setup-python@v2
22+
with:
23+
python-version: "3.x"
24+
1925
- name: Fetch downloads count from Arduino CDN using AWS Athena
2026
id: fetch
2127
env:
@@ -25,11 +31,8 @@ jobs:
2531
AWS_ATHENA_OUTPUT_LOCATION: ${{ secrets.STATS_AWS_ATHENA_OUTPUT_LOCATION }}
2632
GITHUB_REPOSITORY: ${{ github.repository }}
2733
run: |
28-
# Fetch jq 1.6 as VM has only 1.5 ATM
29-
wget -q https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -O jq
30-
chmod +x jq
31-
PATH="${{ github.workspace }}:$PATH"
32-
.github/tools/fetch_athena_stats.sh
34+
pip install boto3 semver
35+
python .github/tools/fetch_athena_stats.py
3336
3437
- name: Send metrics
3538
uses: masci/datadog@v1

0 commit comments

Comments
 (0)