
Commit ed55752 (2 parents: 0b19779 + 63f40b9)

Merge pull request #176 from casework/release-0.17.0

Release 0.17.0


41 files changed: +22550, -439 lines

.github/workflows/cicd.yml

Lines changed: 13 additions & 8 deletions
@@ -39,32 +39,37 @@ jobs:
       matrix:
         python-version:
           - '3.9'
-          - '3.12'
+          - '3.13'
 
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-java@v3
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: '11'
+
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
+
       - name: Pre-commit Checks
         run: |
           pip -q install pre-commit
           pre-commit run --all-files
+
       - name: Start from clean state
         run: make clean
+
       - name: Run tests
         run: make PYTHON3=python check
 
       # Build the binary wheel as well as the source tar
       - name: Build Objects
         run: |
-          pip install -q twine wheel
-          python setup.py sdist bdist_wheel
+          pip install -q twine build
+          python -m build

@@ -73,9 +78,9 @@ jobs:
 
       # Upload the packages on all develop and main pipleines for test consumption
       - name: Upload HTML Docs
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: packages
+          name: packages-${{ matrix.python-version }}
           path: ./dist/
 
       # If this commit is the result of a Git tag, push the wheel and tar packages
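Note: the "Build Objects" step above moves from the deprecated `python setup.py sdist bdist_wheel` invocation to the PEP 517 `build` front end. A minimal local reproduction of that step, assuming the `build` package is installed in the current environment, might look like:

    # Hypothetical local equivalent of the workflow's "Build Objects" step;
    # assumes `pip install build` has already been run.
    import subprocess
    import sys

    # `python -m build` produces both the sdist and the wheel under ./dist/,
    # replacing the deprecated `python setup.py sdist bdist_wheel` call.
    subprocess.run([sys.executable, "-m", "build"], check=True)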

.github/workflows/prerelease.yml

Lines changed: 3 additions & 3 deletions
@@ -30,12 +30,12 @@ jobs:
       matrix:
         python-version:
           - '3.9'
-          - '3.12'
+          - '3.13'
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
       - name: Review dependencies

.pre-commit-config.yaml

Lines changed: 3 additions & 3 deletions
@@ -1,14 +1,14 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: 23.12.1
+    rev: 25.1.0
     hooks:
       - id: black
   - repo: https://github.com/pycqa/flake8
-    rev: 7.0.0
+    rev: 7.2.0
     hooks:
       - id: flake8
   - repo: https://github.com/pycqa/isort
-    rev: 5.13.2
+    rev: 6.0.1
     hooks:
       - id: isort
         name: isort (python)

case_utils/case_file/__init__.py

Lines changed: 3 additions & 10 deletions
@@ -18,7 +18,7 @@
 This module creates a graph object that provides a basic UCO characterization of a single file. The gathered metadata is among the more "durable" file characteristics, i.e. characteristics that would remain consistent when transferring a file between locations.
 """
 
-__version__ = "0.6.0"
+__version__ = "0.7.0"
 
 import argparse
 import datetime

@@ -38,7 +38,6 @@
     NS_UCO_CORE,
     NS_UCO_OBSERVABLE,
     NS_UCO_TYPES,
-    NS_UCO_VOCABULARY,
     NS_XSD,
 )

@@ -228,14 +227,9 @@ def create_file_node(
 
         l_hash_method: rdflib.Literal
         if key in ("sha3_256", "sha3_512"):
-            l_hash_method = rdflib.Literal(
-                key.replace("_", "-").upper(),
-                datatype=NS_UCO_VOCABULARY.HashNameVocab,
-            )
+            l_hash_method = rdflib.Literal(key.replace("_", "-").upper())
         else:
-            l_hash_method = rdflib.Literal(
-                key.upper(), datatype=NS_UCO_VOCABULARY.HashNameVocab
-            )
+            l_hash_method = rdflib.Literal(key.upper())
 
         hash_value: str = getattr(successful_hashdict, key)
         l_hash_value = rdflib.Literal(hash_value.upper(), datatype=NS_XSD.hexBinary)

@@ -300,7 +294,6 @@ def main() -> None:
     graph.namespace_manager.bind("uco-core", NS_UCO_CORE)
     graph.namespace_manager.bind("uco-observable", NS_UCO_OBSERVABLE)
     graph.namespace_manager.bind("uco-types", NS_UCO_TYPES)
-    graph.namespace_manager.bind("uco-vocabulary", NS_UCO_VOCABULARY)
    graph.namespace_manager.bind("xsd", NS_XSD)
 
    output_format = None
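Note: with the `NS_UCO_VOCABULARY.HashNameVocab` datatype dropped, the hash-method literals become plain (string) literals. A minimal sketch of the resulting literal construction, mirroring the two branches above for one example key (not the module's full `create_file_node` logic):

    import rdflib

    # Sketch only: shows the literal produced after this change.
    key = "sha3_256"
    if key in ("sha3_256", "sha3_512"):
        l_hash_method = rdflib.Literal(key.replace("_", "-").upper())
    else:
        l_hash_method = rdflib.Literal(key.upper())

    print(repr(l_hash_method))  # rdflib.term.Literal('SHA3-256')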

case_utils/case_sparql_construct/__init__.py

Lines changed: 6 additions & 4 deletions
@@ -18,7 +18,7 @@
 This script executes a SPARQL CONSTRUCT query, returning a graph of the generated triples.
 """
 
-__version__ = "0.2.6"
+__version__ = "0.2.7"
 
 import argparse
 import logging

@@ -42,9 +42,11 @@ def main() -> None:
 
     # Configure debug logging before running parse_args, because there could be an error raised before the construction of the argument parser.
     logging.basicConfig(
-        level=logging.DEBUG
-        if ("--debug" in sys.argv or "-d" in sys.argv)
-        else logging.INFO
+        level=(
+            logging.DEBUG
+            if ("--debug" in sys.argv or "-d" in sys.argv)
+            else logging.INFO
+        )
     )
 
     parser.add_argument("-d", "--debug", action="store_true")
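Note: this logging change (and the matching ones in case_sparql_select and case_validate below) only rewraps the conditional expression into the parenthesized style newer Black releases emit; the selected level is unchanged. A small sketch of the behavior, assuming the scripts' `-d`/`--debug` flag convention:

    import logging
    import sys

    # Same effect before and after the reformat: DEBUG when -d/--debug appears
    # anywhere on the command line, INFO otherwise.
    logging.basicConfig(
        level=(
            logging.DEBUG
            if ("--debug" in sys.argv or "-d" in sys.argv)
            else logging.INFO
        )
    )
    logging.getLogger(__name__).debug("only visible with -d/--debug")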

case_utils/case_sparql_select/__init__.py

Lines changed: 6 additions & 4 deletions
@@ -29,7 +29,7 @@
 Should a more complex query be necessary, an outer, wrapping SELECT query would let this script continue to function.
 """
 
-__version__ = "0.5.2"
+__version__ = "0.5.3"
 
 import argparse
 import binascii

@@ -197,9 +197,11 @@ def main() -> None:
 
     # Configure debug logging before running parse_args, because there could be an error raised before the construction of the argument parser.
     logging.basicConfig(
-        level=logging.DEBUG
-        if ("--debug" in sys.argv or "-d" in sys.argv)
-        else logging.INFO
+        level=(
+            logging.DEBUG
+            if ("--debug" in sys.argv or "-d" in sys.argv)
+            else logging.INFO
+        )
     )
 
     parser.add_argument("-d", "--debug", action="store_true")

case_utils/case_validate/__init__.py

Lines changed: 15 additions & 13 deletions
@@ -32,7 +32,7 @@
 details.)
 """
 
-__version__ = "0.5.0"
+__version__ = "0.6.0"
 
 import argparse
 import logging

@@ -41,7 +41,7 @@
 import warnings
 from typing import Any, Dict, List, Optional, Tuple, Union
 
-import pyshacl  # type: ignore
+import pyshacl
 import rdflib
 from rdflib import Graph

@@ -120,14 +120,14 @@ def validate(
     )
 
     # Validate data graph against ontology graph.
-    validate_result: Tuple[
-        bool, Union[Exception, bytes, str, rdflib.Graph], str
-    ] = pyshacl.validate(
-        data_graph,
-        *args,
-        ont_graph=ontology_graph,
-        shacl_graph=ontology_graph,
-        **kwargs,
+    validate_result: Tuple[bool, Union[Exception, bytes, str, rdflib.Graph], str] = (
+        pyshacl.validate(
+            data_graph,
+            *args,
+            ont_graph=ontology_graph,
+            shacl_graph=ontology_graph,
+            **kwargs,
+        )
     )
 
     # Relieve RAM of the data graph after validation has run.

@@ -159,9 +159,11 @@ def main() -> None:
     # could be an error raised before the construction of the argument
     # parser.
     logging.basicConfig(
-        level=logging.DEBUG
-        if ("--debug" in sys.argv or "-d" in sys.argv)
-        else logging.INFO
+        level=(
+            logging.DEBUG
+            if ("--debug" in sys.argv or "-d" in sys.argv)
+            else logging.INFO
+        )
     )
 
     # Add arguments specific to case_validate.
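Note: the `pyshacl.validate` call itself is unchanged by the rewrapping above; only the line breaking of the annotated assignment differs. A minimal sketch of the call and its three-element return value, using empty graphs purely for illustration (real usage passes the data graph under validation and the CASE/UCO ontology graph, as in `case_utils.case_validate.validate`):

    import pyshacl
    import rdflib

    # Illustrative only: empty graphs stand in for the data and ontology graphs.
    data_graph = rdflib.Graph()
    ontology_graph = rdflib.Graph()

    conforms, results_graph, results_text = pyshacl.validate(
        data_graph,
        ont_graph=ontology_graph,
        shacl_graph=ontology_graph,
    )
    print(conforms)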

case_utils/inherent_uuid.py

Lines changed: 60 additions & 10 deletions
@@ -57,7 +57,7 @@
 >>> assert str(n_file_facet)[-36:] == str(n_file_facet_2)[-36:]
 """
 
-__version__ = "0.1.2"
+__version__ = "0.2.0"
 
 import binascii
 import re

@@ -66,16 +66,16 @@
 
 from rdflib import Literal, Namespace, URIRef
 
-from case_utils.namespace import NS_UCO_CORE, NS_UCO_VOCABULARY, NS_XSD
+from case_utils.namespace import NS_UCO_CORE, NS_XSD
 
-L_MD5 = Literal("MD5", datatype=NS_UCO_VOCABULARY.HashNameVocab)
-L_SHA1 = Literal("SHA1", datatype=NS_UCO_VOCABULARY.HashNameVocab)
-L_SHA256 = Literal("SHA256", datatype=NS_UCO_VOCABULARY.HashNameVocab)
-L_SHA3_256 = Literal("SHA3-256", datatype=NS_UCO_VOCABULARY.HashNameVocab)
-L_SHA3_512 = Literal("SHA3-512", datatype=NS_UCO_VOCABULARY.HashNameVocab)
-L_SHA384 = Literal("SHA384", datatype=NS_UCO_VOCABULARY.HashNameVocab)
-L_SHA512 = Literal("SHA512", datatype=NS_UCO_VOCABULARY.HashNameVocab)
-L_SSDEEP = Literal("SSDEEP", datatype=NS_UCO_VOCABULARY.HashNameVocab)
+L_MD5 = Literal("MD5")
+L_SHA1 = Literal("SHA1")
+L_SHA256 = Literal("SHA256")
+L_SHA3_256 = Literal("SHA3-256")
+L_SHA3_512 = Literal("SHA3-512")
+L_SHA384 = Literal("SHA384")
+L_SHA512 = Literal("SHA512")
+L_SSDEEP = Literal("SSDEEP")
 
 # Key: hashMethod literal.
 # Value: Tuple.

@@ -96,6 +96,15 @@
 )
 
 
+def dictionary_entry_inherence_uuid(
+    uco_object_uuid_namespace: uuid.UUID, key_name: str, *args: Any, **kwargs: Any
+) -> uuid.UUID:
+    """
+    This function returns a UUIDv5 for dictionary entries, incorporating the key string's value.
+    """
+    return uuid.uuid5(uco_object_uuid_namespace, key_name)
+
+
 def inherence_uuid(n_thing: URIRef, *args: Any, **kwargs: Any) -> uuid.UUID:
     """
     This function returns a UUIDv5 for any OWL Thing, that can be used as a UUID Namespace in further `uuid.uuidv5` calls.

@@ -152,6 +161,47 @@ def facet_inherence_uuid(
     return uuid.uuid5(uco_object_inherence_uuid, str(n_facet_class))
 
 
+def get_dictionary_entry_uriref(
+    n_dictionary: URIRef,
+    n_dictionary_entry_class: URIRef,
+    key_name: str,
+    *args: Any,
+    namespace: Namespace,
+    **kwargs: Any
+) -> URIRef:
+    """
+    :param namespace: An RDFLib Namespace object to use for prefixing the Dictionary IRI with a knowledge base prefix IRI.
+    :type namespace rdflib.Namespace:
+
+    :param n_dictionary_entry_class: Assumed to be a "Proper Dictionary", as defined in UCO Issue 602.
+
+    References
+    ==========
+    * https://github.com/ucoProject/UCO/issues/602
+
+    Examples
+    ========
+    A dictionary has to have an entry with key "foo". What is the IRI of the dictionary entry?
+
+    >>> from case_utils.namespace import NS_UCO_TYPES
+    >>> ns_kb = Namespace("http://example.org/kb/")
+    >>> n_dictionary = ns_kb["Dictionary-eb7e68d8-94db-4071-86fa-a51a33dc4a97"]
+    >>> n_dictionary_entry = get_dictionary_entry_uriref(n_dictionary, NS_UCO_TYPES.DictionaryEntry, "foo", namespace=ns_kb)
+    >>> n_dictionary_entry
+    rdflib.term.URIRef('http://example.org/kb/DictionaryEntry-6ce6b412-6a3a-5ebf-993a-9df2c80d2107')
+    """
+    uco_object_uuid_namespace: uuid.UUID = inherence_uuid(n_dictionary)
+    dictionary_entry_uuid = dictionary_entry_inherence_uuid(
+        uco_object_uuid_namespace, key_name
+    )
+
+    dictionary_entry_class_local_name = str(n_dictionary_entry_class).rsplit("/")[-1]
+
+    return namespace[
+        dictionary_entry_class_local_name + "-" + str(dictionary_entry_uuid)
+    ]
+
+
 def get_facet_uriref(
     n_uco_object: URIRef,
     n_facet_class: URIRef,
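Note: the two new helpers chain UUIDv5 derivations. `inherence_uuid` turns the Dictionary node's IRI into a UUID namespace, `dictionary_entry_inherence_uuid` hashes the entry key within that namespace, and `get_dictionary_entry_uriref` assembles the final node IRI. A short usage sketch, assuming case_utils 0.17.0 is installed (all names are taken from the diff above):

    from rdflib import Namespace

    from case_utils.inherent_uuid import (
        dictionary_entry_inherence_uuid,
        get_dictionary_entry_uriref,
        inherence_uuid,
    )
    from case_utils.namespace import NS_UCO_TYPES

    ns_kb = Namespace("http://example.org/kb/")
    n_dictionary = ns_kb["Dictionary-eb7e68d8-94db-4071-86fa-a51a33dc4a97"]

    # Step 1: UUID namespace derived from the Dictionary node's IRI.
    dictionary_uuid_namespace = inherence_uuid(n_dictionary)
    # Step 2: UUIDv5 within that namespace, seeded with the entry key "foo".
    entry_uuid = dictionary_entry_inherence_uuid(dictionary_uuid_namespace, "foo")

    # get_dictionary_entry_uriref performs both steps and prefixes the class name.
    n_entry = get_dictionary_entry_uriref(
        n_dictionary, NS_UCO_TYPES.DictionaryEntry, "foo", namespace=ns_kb
    )
    assert str(n_entry).endswith(str(entry_uuid))
    print(n_entry)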
