Skip to content

Commit

Permalink
Encode dataclasses properly, test generated JSON with actual dataclass elements
Browse files Browse the repository at this point in the history
  • Loading branch information
  • Loading branch information
EnricoMi committed Mar 2, 2022
1 parent 4583bc7 commit 11c02e5
Show file tree
Hide file tree
Showing 3 changed files with 56 additions and 11 deletions.
14 changes: 14 additions & 0 deletions python/publish/json_encoder.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import json
import dataclasses


class JSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes dataclass instances and never escapes non-ASCII.

    Forces ``ensure_ascii=False`` so non-ASCII characters are written verbatim
    (UTF-8) instead of as ``\\uXXXX`` escapes, and converts dataclass instances
    (recursively, via ``dataclasses.asdict``) into plain dicts.
    """

    def __init__(self, *args, indent=None, **kwargs):
        # Override any caller-supplied ensure_ascii: output must be raw UTF-8.
        kwargs['ensure_ascii'] = False
        super().__init__(*args, indent=indent, **kwargs)

    def default(self, o):
        # is_dataclass() is True for dataclass *types* as well as instances,
        # but asdict() only accepts instances — guard against class objects so
        # they fall through to the standard "not JSON serializable" TypeError.
        if dataclasses.is_dataclass(o) and not isinstance(o, type):
            return dataclasses.asdict(o)
        return super().default(o)
14 changes: 10 additions & 4 deletions python/publish/publisher.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
import logging
import os
import re
from dataclasses import dataclass
from typing import List, Any, Optional, Tuple, Mapping, Dict
Expand All @@ -18,6 +19,7 @@
Annotation, SomeTestChanges
from publish import logger
from publish.github_action import GithubAction
from publish.json_encoder import JSONEncoder
from publish.unittestresults import UnitTestCaseResults, UnitTestRunResults, get_stats_delta


Expand Down Expand Up @@ -266,11 +268,15 @@ def publish_check(self,

def publish_json(self, data: Dict[str, Any]):
if self._settings.json_file:
with open(self._settings.json_file, 'wt', encoding='utf-8') as w:
try:
with open(self._settings.json_file, 'wt', encoding='utf-8') as w:
json.dump(data, w, ensure_ascii=False, cls=JSONEncoder)
except Exception as e:
self._gha.error(f'Failed to write JSON file {self._settings.json_file}: {str(e)}')
try:
json.dump(data, w, ensure_ascii=False)
except Exception as e:
self._gha.error(f'Failed to write JSON file {self._settings.json_file}: {str(e)}')
os.unlink(self._settings.json_file)
except:
pass

# replace some large fields with their lengths
if data.get('stats', {}).get('errors') is not None:
Expand Down
39 changes: 32 additions & 7 deletions python/test/test_publisher.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import os
import tempfile
import unittest
from collections.abc import Collection
from datetime import datetime, timezone
import os

import github.CheckRun
import mock
Expand Down Expand Up @@ -1193,20 +1193,45 @@ def test_publish_json(self):
data = dict(
key='value',
dict=dict(list=[1, 2, 3]),
stats=dict(errors=[1, 2, 3]),
stats_with_delta=dict(errors=[1.0, 2.0, 3.0]),
annotations=['one', 'two', 'three']
stats=dict(errors=[ParseError('file', 'message', 1, 2)]),
stats_with_delta=dict(errors=[ParseError('file', 'message', 1, 2), ParseError('file2', 'message2', 2, 4)]),
annotations=[Annotation(
path='path',
start_line=1,
end_line=2,
start_column=3,
end_column=4,
annotation_level='failure',
message='message',
title=f'Error processing result file',
raw_details='file'
)]
)
publisher.publish_json(data)
gha.error.assert_not_called()

# assert the file
with open(filepath, encoding='utf-8') as r:
actual = r.read()
self.assertEqual(
'{'
'"key": "value", '
'"dict": {"list": [1, 2, 3]}, '
'"stats": {"errors": [{"file": "file", "message": "message", "line": 1, "column": 2}]}, '
'"stats_with_delta": {"errors": [{"file": "file", "message": "message", "line": 1, "column": 2}, {"file": "file2", "message": "message2", "line": 2, "column": 4}]}, '
'"annotations": [{"path": "path", "start_line": 1, "end_line": 2, "start_column": 3, "end_column": 4, "annotation_level": "failure", "message": "message", "title": "Error processing result file", "raw_details": "file"}]'
'}',
actual
)

# data is being sent to GH action output 'json'
# some list fields are replaced by their length
expected = dict(
key='value',
dict=dict(list=[1, 2, 3]),
stats=dict(errors=3),
stats_with_delta=dict(errors=3),
annotations=3
stats=dict(errors=1),
stats_with_delta=dict(errors=2),
annotations=1
)
gha.set_output.assert_called_once_with('json', json.dumps(expected))

Expand Down

0 comments on commit 11c02e5

Please sign in to comment.