
Commit 942341c

Merge pull request #1 from casework/ci-workflows
Workflows
2 parents cfb518d + d514e3d commit 942341c

File tree

6 files changed, +216 -3 lines changed


.github/workflows/build.yml

Lines changed: 86 additions & 0 deletions
@@ -0,0 +1,86 @@
name: Build Jobs

on: [ push ]

# Build all of the projects and run them against the CASE graph

jobs:
  dotnet:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install the .NET SDK
      - name: Setup .NET
        uses: actions/[email protected]
        with:
          dotnet-version: 6.x

      # Build the project
      - name: Build
        run: dotnet build
        working-directory: ./dotnet/

      # Run the built CASE conversion example
      - name: Run CASE Conversion
        run: |
          dotnet run ../data/geo.json ../output/dotnet.geojson
          cat ../output/dotnet.geojson
        working-directory: ./dotnet/

  python:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install Python
      - name: Setup Python
        uses: actions/[email protected]
        with:
          python-version: 3.9

      # Install the rdflib dependency
      - name: Install Dependencies
        run: pip3 install rdflib

      # Run the Python CASE conversion example
      - name: Run CASE Conversion
        run: |
          ls
          python3 CASE2GeoJSON.py ../data/geo.json ../output/python.geojson
          cat ../output/python.geojson
        working-directory: ./python/

  java:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install Java
      - name: Setup Java
        uses: actions/[email protected]
        with:
          distribution: 'temurin'
          java-version: '18'
          cache: 'maven'

      # Build the project
      - name: Build Project
        run: mvn compile assembly:single
        working-directory: ./java/case2geo/

      # Run the Java CASE conversion example
      - name: Run CASE Conversion
        run: |
          java -jar ./target/case2geo-0.1.0.jar ../../data/geo.json ../../output/java.geojson
          cat ../../output/java.geojson
        working-directory: ./java/case2geo/
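For reference, the three jobs can be reproduced locally with essentially the same commands the workflow runs. This assumes a .NET 6 SDK, Python 3.9 with rdflib, and JDK 18 with Maven are installed, and that the data/ and output/ directories exist at the repository root:

# .NET (run from ./dotnet/)
dotnet build
dotnet run ../data/geo.json ../output/dotnet.geojson

# Python (run from ./python/)
pip3 install rdflib
python3 CASE2GeoJSON.py ../data/geo.json ../output/python.geojson

# Java (run from ./java/case2geo/)
mvn compile assembly:single
java -jar ./target/case2geo-0.1.0.jar ../../data/geo.json ../../output/java.geojson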

.github/workflows/case_validate.yml

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
name: CASE Validate

on: [ push ]

# Ensure all of the files in /data/ are valid CASE graphs.

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository to be packaged
      - name: Get Repo
        uses: actions/checkout@v3

      # Validate the CASE graphs in the /data/ directory
      - name: CASE Validate
        uses: kchason/[email protected]
        with:
          case-path: "./data/"
          case-version: "case-1.2.0"
          extension-filter: "json"
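The action pins the CASE 1.2.0 ontology version and validates every .json file under ./data/. A roughly equivalent local check, assuming the case-utils Python package and its case_validate command (the exact flags, and whatever the kchason/case-validation-action uses internally, may differ), would be:

pip install case-utils
case_validate data/geo.json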

.github/workflows/lint.yml

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
name: Lint

on: [ push ]

# Run the pre-commit checks across the repository

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install pre-commit and run it
      - name: Pre-commit Checks
        run: |
          pip -q install pre-commit
          pre-commit run --all-files
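The same checks can be run locally before pushing; pre-commit can also register itself as a git hook so the checks run automatically on each commit:

pip install pre-commit
pre-commit install          # run the configured hooks on every commit
pre-commit run --all-files  # or run them once across the whole repository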

.pre-commit-config.yaml

Lines changed: 0 additions & 1 deletion
@@ -10,7 +10,6 @@ repos:
     - id: end-of-file-fixer
     - id: check-yaml
     - id: check-added-large-files
-    - id: check-case-conflict
     - id: check-merge-conflict
     - id: check-json
     - id: check-xml

java/case2geo/pom.xml

Lines changed: 2 additions & 2 deletions
@@ -15,8 +15,8 @@
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <maven.compiler.source>1.18</maven.compiler.source>
-    <maven.compiler.target>1.18</maven.compiler.target>
+    <maven.compiler.source>18</maven.compiler.source>
+    <maven.compiler.target>18</maven.compiler.target>
     <exec.mainClass>org.caseontology.examples.CASE2Geo</exec.mainClass>
   </properties>
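Maven's legacy "1.x" notation only covers releases up through Java 8 (1.8); newer releases are identified by a plain number, so "1.18" is not a valid compiler source/target value, and the change to "18" is what lets the Java 18 build in build.yml compile. As an aside (an alternative form, not what this commit does), recent maven-compiler-plugin versions also accept a single release property:

<maven.compiler.release>18</maven.compiler.release>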

python/CASE2GeoJSON.py

Lines changed: 86 additions & 0 deletions
@@ -0,0 +1,86 @@
import json
import sys
from os.path import exists, isdir, isfile

from geotypes import GeoRecord
from geoutilities import records_to_geojson, remove_nulls
from rdflib import Graph

# Parse the arguments from the CLI to get the input and output filenames
if len(sys.argv) != 3:
    print("Usage: python case2geojson.py <input-file> <output-file>")
    sys.exit(1)

input_filename: str = sys.argv[1]
output_filename: str = sys.argv[2]

# Ensure the input file exists
if not exists(input_filename) and not isfile(input_filename):
    print(f"File not found: {input_filename}")
    sys.exit(1)

# Ensure the output directory exists
output_directory: str = output_filename[: output_filename.rfind("/")]
if not exists(output_directory) and not isdir(output_directory):
    print(f"Directory not found: {output_directory}")
    sys.exit(1)

# Build the rdflib graph from the input file
graph: Graph = Graph()
graph.parse(input_filename)

# Write the SPARQL query to get the data from the graph
query: str = """
SELECT ?lLatitude ?lLongitude ?lAddressType ?lCountry ?lLocality ?lPostalCode ?lRegion ?lStreet
WHERE
{
    ?nLocation a uco-location:Location .
    OPTIONAL
    {
        ?nLocation uco-core:hasFacet ?nLatLongFacet .
        ?nLatLongFacet a uco-location:LatLongCoordinatesFacet .
        OPTIONAL { ?nLatLongFacet uco-location:latitude ?lLatitude . }
        OPTIONAL { ?nLatLongFacet uco-location:longitude ?lLongitude . }
    }

    OPTIONAL {
        ?nLocation uco-core:hasFacet ?nSimpleAddressFacet .
        ?nSimpleAddressFacet a uco-location:SimpleAddressFacet .
        OPTIONAL { ?nSimpleAddressFacet uco-location:addressType ?lAddressType . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:country ?lCountry . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:locality ?lLocality . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:postalCode ?lPostalCode . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:region ?lRegion . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:street ?lStreet . }
    }
}
"""

results = graph.query(query)

# Define the list of GeoRecords
records: list[GeoRecord] = []

# Loop through the results and add them to the list of GeoRecords if the latitude and longitude are present
for row in results:
    geo_record: GeoRecord = GeoRecord()
    geo_record.Latitude = row.lLatitude
    geo_record.Longitude = row.lLongitude
    geo_record.AddressType = row.lAddressType
    geo_record.Country = row.lCountry
    geo_record.Locality = row.lLocality
    geo_record.PostalCode = row.lPostalCode
    geo_record.Region = row.lRegion
    geo_record.Street = row.lStreet
    records.append(geo_record)

# Convert the data to a GeoJSON structured object
geoJSON = records_to_geojson(records)

# Remove null values from the GeoJSON object
geoDict: dict = geoJSON.reprJSON()
geoDict = remove_nulls(geoDict)

# Write the GeoJSON object to the output file
with open(output_filename, "w") as output_file:
    output_file.write(json.dumps(geoDict, indent=4))
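The script depends on two repository-local modules, geotypes and geoutilities, that are not part of this diff. The following is only a hypothetical sketch of the interfaces the script assumes, inferred from the call sites above (the attribute names, reprJSON(), records_to_geojson(), and remove_nulls()); it is not the actual content of those files:

from typing import Any, Optional


class GeoRecord:
    # Hypothetical container with the attributes assigned in the loop above
    def __init__(self) -> None:
        self.Latitude: Optional[Any] = None
        self.Longitude: Optional[Any] = None
        self.AddressType: Optional[Any] = None
        self.Country: Optional[Any] = None
        self.Locality: Optional[Any] = None
        self.PostalCode: Optional[Any] = None
        self.Region: Optional[Any] = None
        self.Street: Optional[Any] = None


class FeatureCollection:
    # Hypothetical GeoJSON wrapper exposing the reprJSON() method used above
    def __init__(self, records: list[GeoRecord]) -> None:
        self.records = records

    def reprJSON(self) -> dict:
        # GeoJSON orders point coordinates as [longitude, latitude]
        return {
            "type": "FeatureCollection",
            "features": [
                {
                    "type": "Feature",
                    "geometry": {
                        "type": "Point",
                        "coordinates": [
                            float(r.Longitude) if r.Longitude is not None else None,
                            float(r.Latitude) if r.Latitude is not None else None,
                        ],
                    },
                    "properties": {
                        "addressType": str(r.AddressType) if r.AddressType is not None else None,
                        "country": str(r.Country) if r.Country is not None else None,
                        "locality": str(r.Locality) if r.Locality is not None else None,
                        "postalCode": str(r.PostalCode) if r.PostalCode is not None else None,
                        "region": str(r.Region) if r.Region is not None else None,
                        "street": str(r.Street) if r.Street is not None else None,
                    },
                }
                for r in self.records
            ],
        }


def records_to_geojson(records: list[GeoRecord]) -> FeatureCollection:
    # Wrap the query results in the GeoJSON container
    return FeatureCollection(records)


def remove_nulls(value: Any) -> Any:
    # Recursively drop None entries from nested dicts and lists
    if isinstance(value, dict):
        return {k: remove_nulls(v) for k, v in value.items() if v is not None}
    if isinstance(value, list):
        return [remove_nulls(v) for v in value if v is not None]
    return value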
