diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..3edb0b5
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,34 @@
+# Include any files or directories that you don't want to be copied to your
+# container here (e.g., local build artifacts, temporary files, etc.).
+#
+# For more help, visit the .dockerignore file reference guide at
+# https://docs.docker.com/engine/reference/builder/#dockerignore-file
+
+**/.DS_Store
+**/__pycache__
+**/.venv
+**/.classpath
+**/.dockerignore
+**/.env
+**/.git
+**/.gitignore
+**/.project
+**/.settings
+**/.toolstarget
+**/.vs
+**/.vscode
+**/*.*proj.user
+**/*.dbmdl
+**/*.jfm
+**/bin
+**/charts
+**/docker-compose*
+**/compose*
+**/Dockerfile*
+**/node_modules
+**/npm-debug.log
+**/obj
+**/secrets.dev.yaml
+**/values.dev.yaml
+LICENSE
+README.md
diff --git a/.gitignore b/.gitignore
index e7400da..bc5cbed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,4 @@ venv
 *log*
 *sh
 src/
+**/.DS_Store
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 0a705e7..9c65490 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,13 +1,26 @@
-FROM python:3.8-slim-buster
-RUN apt-get update && apt-get install -y git
-RUN apt-get install -y procps
+ARG PYTHON_VERSION=3.9.13
+FROM python:${PYTHON_VERSION}-slim as base
+
+# Prevents Python from writing pyc files.
+ENV PYTHONDONTWRITEBYTECODE=1
+
+# Keeps Python from buffering stdout and stderr to avoid situations where
+# the application crashes without emitting any logs due to buffering.
+ENV PYTHONUNBUFFERED=1
 
 WORKDIR /app
 
 COPY requirements.txt requirements.txt
-RUN pip3 install -r requirements.txt
+RUN pip install -r requirements.txt
 
+# Copy the source code into the container.
 COPY . .
 
-CMD "./script.sh"
\ No newline at end of file
+
+# Expose the port that the application listens on.
+EXPOSE 5000
+
+# Run the application.
+#CMD gunicorn 'venv.lib.python3.11.site-packages.werkzeug.wsgi' --bind=0.0.0.0:5000
+#CMD gunicorn main:app --access-logfile ./server.log --timeout 300
+CMD python main.py
\ No newline at end of file
diff --git a/Dockerfile.old b/Dockerfile.old
new file mode 100644
index 0000000..0a705e7
--- /dev/null
+++ b/Dockerfile.old
@@ -0,0 +1,13 @@
+FROM python:3.8-slim-buster
+
+RUN apt-get update && apt-get install -y git
+RUN apt-get install -y procps
+
+WORKDIR /app
+
+COPY requirements.txt requirements.txt
+RUN pip3 install -r requirements.txt
+
+COPY . .
+
+CMD "./script.sh"
\ No newline at end of file
diff --git a/compose.yaml b/compose.yaml
new file mode 100644
index 0000000..b997f23
--- /dev/null
+++ b/compose.yaml
@@ -0,0 +1,7 @@
+services:
+  server:
+    build:
+      context: .
+    ports:
+      - 5000:5000
+    restart: always
diff --git a/import_one.py b/import_one.py
index 9fe26d7..b1b3d4e 100644
--- a/import_one.py
+++ b/import_one.py
@@ -1,7 +1,8 @@
 # configuration for pywikibot
 import sys
 
 import pywikibot
+from util.util import import_one
 
 # connect to the wikibase
 wikibase = pywikibot.Site("my", "my")
@@ -19,13 +20,4 @@
 # import a single item or property
 arg = sys.argv[1]
 print(f"Importing {arg}")
-if arg.startswith("Q"):
-    print("before get")
-    wikidata_item = pywikibot.ItemPage(wikidata_repo, arg)
-    wikidata_item.get()
-    print("after get")
-    wikibase_importer.change_item(wikidata_item, wikibase_repo, True)
-elif arg.startswith("P"):
-    wikidata_property = pywikibot.PropertyPage(wikidata_repo, arg)
-    wikidata_property.get()
-    wikibase_importer.change_property(wikidata_property, wikibase_repo, True)
+import_one(arg)
diff --git a/import_recent_changes.py b/import_recent_changes.py
index 6994407..85664ae 100644
--- a/import_recent_changes.py
+++ b/import_recent_changes.py
@@ -27,8 +27,7 @@
 identifier.get(wikibase_repo)
 print('Wikidata Item Identifier',identifier.itemIdentifier)
 
-idSparql = IdSparql(app_config.get('wikibase', 'sparqlEndPoint'), identifier.itemIdentifier, identifier.propertyIdentifier)
-idSparql.load()
+idSparql = IdSparql()
 
 #grab all entities that changed
 recent = get_wikidata_changes(None, 15)
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..7615661
--- /dev/null
+++ b/main.py
@@ -0,0 +1,91 @@
+import socket
+import sys
+
+import requests
+from flask import Flask, request
+from flask_restful import Api, Resource
+from flask_cors import CORS, cross_origin
+from threading import Thread
+
+from util.util import import_one
+
+user_config = __import__("user-config")
+
+app = Flask(__name__)
+CORS(app)
+
+api = Api(app)
+
+class Index(Resource):
+    def get(self):
+        return {"data": "Welcome to the index"}
+
+# class that imports all statements from Wikidata
+class Sync(Resource):
+    def get(self):
+        q_id = request.args.get('q_id')
+        api_key = request.args.get('api_key')
+        if is_authorised(api_key):
+            import_one(q_id).getID()
+            payload = {"status_code": 200, "completed": True, "message": "Import process complete"}
+        else:
+            payload = {"status_code": 403, "completed": False, "message": "Unauthorised Access"}
+        return payload
+
+# import one
+class ImportOne(Resource):
+    def get(self):
+        q_id = request.args.get('q_id')
+        api_key = request.args.get('api_key')
+
+        if is_authorised(api_key):
+            response = import_one(q_id, import_statements = False).getID()
+            # print(response)
+            if response:
+                payload = {"status_code": 200, "message": "Import successful", "pid": response}
+            else:
+                payload = {"status_code": 500,
+                           "message": "Import could not be completed"}
+        else:
+            payload = {"status_code": 403, "completed": False, "message": "Unauthorised Access"}
+        return payload
+
+
+class WikiDataQuery(Resource):
+    def get(self):
+        query_string = request.args.get('query_string')
+        query_type = request.args.get('query_type')
+        api_key = request.args.get('api_key')
+
+        url = "https://www.wikidata.org/w/api.php?action=wbsearchentities&search=" + \
+            query_string + "&format=json&errorformat=plaintext&language=en&uselang=en&type=" + query_type
+
+        if is_authorised(api_key):
+            response = requests.get(url)
+            response = response.json()
+            if response:
+                payload = {"status_code": 200, "response": response}
+            else:
+                payload = {"status_code": 500,
+                           "message": "Import could not be completed"}
+        else:
+            payload = {"status_code": 403, "completed": False, "message": "Unauthorised Access"}
+        return payload
+
+
+def is_authorised(api_key):
+    if str(user_config.apiKey) == api_key:
+        return True
+    else:
+        return False
+
+
+# ROUTES
+api.add_resource(Index, "/")
+api.add_resource(Sync, "/sync")
+api.add_resource(ImportOne, "/import-wikidata-item")
+api.add_resource(WikiDataQuery, "/remote-wikidata-query")
+
+
+if __name__ == '__main__':
+    app.run(host='0.0.0.0', debug=True)
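
A minimal sketch of how a client could exercise these routes once the container is up, assuming the service listens on localhost:5000 and the key matches apiKey in user-config.py (Q42 is just an example id):

```python
import requests

BASE_URL = "http://localhost:5000"  # assumed local deployment
API_KEY = "123456"                  # must match apiKey in user-config.py

# Clone the entity itself (no statements); the service answers with the local id.
resp = requests.get(BASE_URL + "/import-wikidata-item",
                    params={"q_id": "Q42", "api_key": API_KEY})
print(resp.json())  # e.g. {"status_code": 200, "message": "Import successful", "pid": "..."}

# Pull the entity including all of its statements.
resp = requests.get(BASE_URL + "/sync", params={"q_id": "Q42", "api_key": API_KEY})
print(resp.json())

# Proxy a search against Wikidata's wbsearchentities API.
resp = requests.get(BASE_URL + "/remote-wikidata-query",
                    params={"query_string": "Douglas Adams", "query_type": "item",
                            "api_key": API_KEY})
print(resp.json())
```

Note that the HTTP status is always 200; success or failure is reported in the status_code field of the JSON body.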
"Unathorised Access"} + return payload + + +def is_authorised(api_key): + if str(user_config.apiKey) == api_key: + return True + else: + return False + + +# ROUTES +api.add_resource(Index, "/") +api.add_resource(Sync, "/sync") +api.add_resource(ImportOne, "/import-wikidata-item") +api.add_resource(WikiDataQuery, "/remote-wikidata-query") + + +if __name__ == '__main__': + app.run(host='0.0.0.0', debug=True) diff --git a/requirements.txt b/requirements.txt index 1968418..dc832e3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,5 +10,10 @@ six==1.13.0 SPARQLWrapper==1.8.4 urllib3==1.25.7 +Flask~=2.0.2 +Flask-Restful==0.3.9 +Flask-Cors==3.0.10 pywikibot~=7.7.2 -mwparserfromhell>=0.5.0 \ No newline at end of file +mwparserfromhell>=0.5.0 +gunicorn==20.1.0 +werkzeug==2.3.7 \ No newline at end of file diff --git a/user-config.py b/user-config.py index 0476818..b056f43 100644 --- a/user-config.py +++ b/user-config.py @@ -1,4 +1,4 @@ -from pywikibot.config2 import usernames +from pywikibot.config import usernames user_families_paths = ['./config'] mylang = "wikidata" @@ -9,3 +9,4 @@ maxthrottle = 0 max_retries = 100 #verbose_output = True +apiKey = 123456 diff --git a/user-password.py b/user-password.py index e7b7567..c9dab66 100644 --- a/user-password.py +++ b/user-password.py @@ -1 +1 @@ -(u'admin', BotPassword(u'WikidataUpdater', u'BotPassword')) +(u'admin', BotPassword(u'WikidataUpdater', u'BotPassword')) \ No newline at end of file diff --git a/util/IdSparql.py b/util/IdSparql.py index 26433c6..864cc60 100644 --- a/util/IdSparql.py +++ b/util/IdSparql.py @@ -1,24 +1,42 @@ # this class makes the correspondence between Wikidata entities and entities in the Wikibase using the external # identifier for Wikidata +import pywikibot from SPARQLWrapper import SPARQLWrapper, JSON import configparser +from util.PropertyWikidataIdentifier import PropertyWikidataIdentifier + class IdSparql: - def __init__(self, endpoint, item_identifier, property_identifier): + _instance = None + + def __new__(class_, *args, **kwargs): + if not isinstance(class_._instance, class_): + class_._instance = object.__new__(class_, *args, **kwargs) + + return class_._instance + + def __init__(self): self.mapEntity = {} self.mapProperty = {} - self.endpoint = endpoint - self.item_identifier = item_identifier - self.property_identifier = property_identifier + wikibase = pywikibot.Site("my", "my") + wikibase_repo = wikibase.data_repository() + wikibase_repo.login() + identifier = PropertyWikidataIdentifier() + identifier.get(wikibase_repo) + self.item_identifier = identifier.itemIdentifier + self.property_identifier = identifier.propertyIdentifier self.app_config = configparser.ConfigParser() self.app_config.read('config/application.config.ini') + self.endpoint = self.app_config.get('wikibase', 'sparqlEndPoint') + self.load() def load(self): sparql = SPARQLWrapper(self.endpoint) query = """ select ?item ?id where { - ?item <""" + self.app_config.get('wikibase','propertyUri') + """/direct/""" + self.item_identifier + """> ?id + ?item <""" + self.app_config.get('wikibase', + 'propertyUri') + """/direct/""" + self.item_identifier + """> ?id } """ sparql.setQuery(query) @@ -26,12 +44,13 @@ def load(self): results = sparql.query().convert() for result in results['results']['bindings']: split = result['item']['value'].split('/') - id = split[len(split)-1] + id = split[len(split) - 1] if id.startswith('Q'): self.mapEntity[result['id']['value']] = id query = """ select ?item ?id where { - ?item <""" + 
diff --git a/util/IdSparql.py b/util/IdSparql.py
index 26433c6..864cc60 100644
--- a/util/IdSparql.py
+++ b/util/IdSparql.py
@@ -1,24 +1,45 @@
 # this class makes the correspondence between Wikidata entities and entities in the Wikibase using the external
 # identifier for Wikidata
+import pywikibot
 from SPARQLWrapper import SPARQLWrapper, JSON
 import configparser
 
+from util.PropertyWikidataIdentifier import PropertyWikidataIdentifier
+
 
 class IdSparql:
-    def __init__(self, endpoint, item_identifier, property_identifier):
+    _instance = None
+
+    def __new__(class_, *args, **kwargs):
+        if not isinstance(class_._instance, class_):
+            class_._instance = object.__new__(class_)  # object.__new__ accepts no extra arguments
+
+        return class_._instance
+
+    def __init__(self):
+        # __new__ returns the cached instance, so skip re-initialisation on later calls
+        if self.__dict__:
+            return
         self.mapEntity = {}
         self.mapProperty = {}
-        self.endpoint = endpoint
-        self.item_identifier = item_identifier
-        self.property_identifier = property_identifier
+        wikibase = pywikibot.Site("my", "my")
+        wikibase_repo = wikibase.data_repository()
+        wikibase_repo.login()
+        identifier = PropertyWikidataIdentifier()
+        identifier.get(wikibase_repo)
+        self.item_identifier = identifier.itemIdentifier
+        self.property_identifier = identifier.propertyIdentifier
         self.app_config = configparser.ConfigParser()
         self.app_config.read('config/application.config.ini')
+        self.endpoint = self.app_config.get('wikibase', 'sparqlEndPoint')
+        self.load()
 
     def load(self):
         sparql = SPARQLWrapper(self.endpoint)
         query = """
         select ?item ?id where {
-            ?item <""" + self.app_config.get('wikibase','propertyUri') + """/direct/""" + self.item_identifier + """> ?id
+            ?item <""" + self.app_config.get('wikibase',
+                                             'propertyUri') + """/direct/""" + self.item_identifier + """> ?id
         }
         """
         sparql.setQuery(query)
@@ -26,12 +47,13 @@ def load(self):
         results = sparql.query().convert()
         for result in results['results']['bindings']:
             split = result['item']['value'].split('/')
-            id = split[len(split)-1]
+            id = split[len(split) - 1]
             if id.startswith('Q'):
                 self.mapEntity[result['id']['value']] = id
         query = """
         select ?item ?id where {
-            ?item <""" + self.app_config.get('wikibase','propertyUri') + """/direct/""" + self.property_identifier + """> ?id
+            ?item <""" + self.app_config.get('wikibase',
+                                             'propertyUri') + """/direct/""" + self.property_identifier + """> ?id
         }
         """
         sparql.setQuery(query)
@@ -45,7 +67,7 @@ def load(self):
         else:
             print("This should not happen")
 
-    def get_id(self,id):
+    def get_id(self, id):
         if id.startswith("Q"):
             return self.mapEntity[id]
         elif id.startswith("P"):
@@ -53,7 +75,7 @@ def get_id(self, id):
         else:
             raise NameError('This should not happen')
 
-    def save_id(self,id,new_id):
+    def save_id(self, id, new_id):
         if id.startswith("Q"):
             self.mapEntity[id] = str(new_id)
         elif id.startswith("P"):
@@ -61,10 +83,10 @@ def save_id(self, id, new_id):
         else:
             raise NameError('This should not happen')
 
-    def contains_id(self,id):
+    def contains_id(self, id):
         if id.startswith("Q"):
             return id in self.mapEntity
         elif id.startswith("P"):
             return id in self.mapProperty
         else:
-            print('This should not happen')
\ No newline at end of file
+            print('This should not happen')
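
For context on the IdSparql refactor: the class is now a self-configuring singleton, so callers no longer pass the endpoint and identifier properties around, and the expensive login plus SPARQL load happens once. A sketch of the intended behaviour, assuming a reachable Wikibase configured in config/application.config.ini:

```python
from util.IdSparql import IdSparql

first = IdSparql()   # first call logs in, reads the config and runs load()
second = IdSparql()  # later calls return the same cached instance
assert first is second

# the Wikidata -> local Wikibase id map is shared by every caller
first.save_id("Q42", "Q7")
assert second.contains_id("Q42")
assert second.get_id("Q42") == "Q7"
```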
diff --git a/util/util.py b/util/util.py
index adad7df..6e35500 100644
--- a/util/util.py
+++ b/util/util.py
@@ -20,11 +20,9 @@ def __init__(self, wikibase_repo, wikidata_repo):
         self.wikidata_repo = wikidata_repo
         self.identifier = PropertyWikidataIdentifier()
         self.identifier.get(wikibase_repo)
-        self.appConfig = configparser.ConfigParser()
-        self.appConfig.read('config/application.config.ini')
-        endpoint = self.appConfig.get('wikibase', 'sparqlEndPoint')
-        self.id = IdSparql(endpoint, self.identifier.itemIdentifier, self.identifier.propertyIdentifier)
-        self.id.load()
+        self.id = IdSparql()
+        self.app_config = configparser.ConfigParser()
+        self.app_config.read('config/application.config.ini')
 
     # transforms the json to an item
     def json_to_item(self, wikibase_repo, json_object):
@@ -126,7 +124,7 @@ def diffLabels(self, wikidata_item, wikibase_item):
                 # no update has been done on label, accept remote update
                 mylabels[label] = wikidata_item.labels.get(label)
             else:
-                if self.appConfig.get('wikibase', 'overwriteLocalChanges').lower() == 'false':
+                if self.app_config.get('wikibase', 'overwriteLocalChanges').lower() == 'false':
                     last_update_revision_on_label = self.get_last_label_update(revisions, label)
                     if last_update_revision_on_label is None:
                         # no update has been done on label, accept remote update
@@ -1051,7 +1049,7 @@ def change_claims(self, wikidata_item, wikibase_item):
                     break
 
             # print("User that added this claim ", revisions[edit_where_claim_was_added]['user'])
-            if revisions[edit_where_claim_was_added]['user'].lower() != self.appConfig.get('wikibase', 'user').lower():
+            if revisions[edit_where_claim_was_added]['user'].lower() != self.app_config.get('wikibase', 'user').lower():
                 not_remove.append(claimToRemove)
         for c in not_remove:
             claims_to_remove.remove(c)
@@ -1074,7 +1072,7 @@ def change_claims(self, wikidata_item, wikibase_item):
             wikidata_claim = c.toJSON()
             found_equal_value = False
             wikidata_property_id = wikidata_claim.get('mainsnak').get('property')
-            print(wikidata_property_id)
+            # print(wikidata_property_id)
             if wikibase_item.getID().startswith("Q") or wikibase_item.getID().startswith("P"):
                 for wikibase_claims in wikibase_item.claims:
                     for wikibase_c in wikibase_item.claims.get(wikibase_claims):
@@ -1086,7 +1084,7 @@ def change_claims(self, wikidata_item, wikibase_item):
                                                                            True)
                         if (claim_found_equal_value == True):
                             found_equal_value = True
-                            print(found_equal_value)
+                            # print(found_equal_value)
             if found_equal_value == False:
                 # print("This claim is added ", wikidata_claim)
                 # import the property if it does not exist
@@ -1213,6 +1211,31 @@ def change_property(self, wikidata_item, wikibase_repo, statements):
         self.change_claims(wikidata_item, wikibase_item)
         return wikibase_item
 
+def import_one(arg, import_statements = True):
+    # connect to the wikibase
+    wikibase = pywikibot.Site("my", "my")
+    wikibase_repo = wikibase.data_repository()
+    wikibase_repo.login()
+
+    # connect to wikidata
+    wikidata = pywikibot.Site("wikidata", "wikidata")
+    wikidata_repo = wikidata.data_repository()
+
+    from util.util import WikibaseImporter
+    wikibase_importer = WikibaseImporter(wikibase_repo, wikidata_repo)
+
+    # import a single item or property
+    print(f"Importing {arg}")
+    if arg.startswith("Q"):
+        print("before get")
+        wikidata_item = pywikibot.ItemPage(wikidata_repo, arg)
+        wikidata_item.get()
+        print("after get")
+        return wikibase_importer.change_item(wikidata_item, wikibase_repo, import_statements)
+    elif arg.startswith("P"):
+        wikidata_property = pywikibot.PropertyPage(wikidata_repo, arg)
+        wikidata_property.get()
+        return wikibase_importer.change_property(wikidata_property, wikibase_repo, import_statements)
 
 def chunks(l, n):
     """Yield successive n-sized chunks from l."""
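
import_one() is the piece the CLI script and the Flask routes now share. A usage sketch, assuming the pywikibot family files under ./config point at a reachable Wikibase (the entity ids are examples):

```python
from util.util import import_one

# labels/descriptions only -- this is what the /import-wikidata-item route does
local_entity = import_one("Q42", import_statements=False)
print(local_entity.getID())  # id of the entity in the local Wikibase

# full import including statements -- this is what /sync does
import_one("Q42")

# properties are dispatched on the "P" prefix
import_one("P31")
```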
diff --git a/wikibasesync-extension/extension.json b/wikibasesync-extension/extension.json
new file mode 100644
index 0000000..7d152a1
--- /dev/null
+++ b/wikibasesync-extension/extension.json
@@ -0,0 +1,62 @@
+{
+	"name": "WikibaseSync",
+	"author": [
+		"mez"
+	],
+	"url": "https://www.mediawiki.org/wiki/Extension:BoilerPlate",
+	"descriptionmsg": "This is a Wikibase extension to use and sync entities between Wikibases",
+	"license-name": "GPL-2.0-or-later",
+	"type": "other",
+	"requires": {
+		"MediaWiki": ">= 1.35.0"
+	},
+	"AutoloadNamespaces": {
+		"MediaWiki\\Extension\\WikibaseSync\\": "src/"
+	},
+	"config": {
+		"WikibaseSyncUrl": {
+			"description": "wikibasesync url",
+			"value": true
+		},
+		"ApiKey": {
+			"description": "API Key to wikibase sync",
+			"value": true
+		},
+		"PID": {
+			"description": "PID",
+			"value": true
+		},
+		"QID": {
+			"description": "QID",
+			"value": true
+		}
+	},
+	"HookHandlers": {
+		"WikibaseSyncHooks": {
+			"class": "MediaWiki\\Extension\\WikibaseSync\\Hooks"
+		}
+	},
+	"Hooks": {
+		"BeforePageDisplay": "WikibaseSyncHooks",
+		"ResourceLoaderGetConfigVars": "WikibaseSyncHooks"
+	},
+	"MessagesDirs": {
+		"BoilerPlate": [
+			"i18n"
+		]
+	},
+	"ResourceModules": {
+		"ext.wikibaseSync": {
+			"localBasePath": "resources/ext.wikibaseSync",
+			"remoteExtPath": "WikibaseSync/resources/ext.wikibaseSync",
+			"dependencies": ["wikibase.view.ControllerViewFactory"],
+			"styles": [],
+			"packageFiles": [
+				"init.js"
+			],
+			"messages": []
+		}
+	},
+	"manifest_version": 2
+}
+
diff --git a/wikibasesync-extension/resources/ext.wikibaseSync/init.js b/wikibasesync-extension/resources/ext.wikibaseSync/init.js
new file mode 100644
index 0000000..22456e9
--- /dev/null
+++ b/wikibasesync-extension/resources/ext.wikibaseSync/init.js
@@ -0,0 +1,427 @@
+/**
+ * @class mw.boilerPlate
+ * @singleton
+ */
+
+var conf = mw.config.get('wgVisualEditor');
+mw.boilerPlate = {};
+var wikidataResults = [];
+var WIKIBASE_SYNC_URL = conf.wikibasesync_server_url;
+var API_KEY = conf.api_key;
+var SERVER = conf.Server;
+
+var datamodel = require('wikibase.datamodel');
+
+$.wikibase.statementgroupview.prototype._createStatementlistview = function () {
+    var self = this,
+        prefix;
+
+    var $statementlistview = this.element.find('.wikibase-statementlistview');
+
+    if (!$statementlistview.length) {
+        $statementlistview = $('
importing...
';
+        $(cloningEl).insertAfter(self.focused);
+        //remote source, clone
+
+        //api call
+        var full_endpoint = WIKIBASE_SYNC_URL + '/import-wikidata-item?q_id=' + id + "&api_key=" + API_KEY;
+        $.ajax({
+            url: full_endpoint,
+            crossDomain: true,
+            headers: {
+                // "accept": "application/json",
+                "Access-Control-Allow-Origin": "*",
+                "Access-Control-Request-Headers3": "x-requested-with"
+            },
+            success: function (data) {
+                console.log(data);
+                if (data.pid) {
+                    $(self.focused).siblings('p').remove();
+                    id = data.pid;
+
+                    if (self.options.type.toLowerCase() == "property") {
+                        self._selectedEntity.id = id;
+                        self._selectedEntity.title = "Property:" + id;
+                        self._selectedEntity.repository = "local";
+                        self._selectedEntity.url = SERVER + "/wiki/Property:" + id;
+                        self._selectedEntity.pageid = null;
+                    } else if (self.options.type.toLowerCase() == "item") {
+                        self._selectedEntity.id = id;
+                        self._selectedEntity.title = id;
+                        self._selectedEntity.repository = "local";
+                        self._selectedEntity.url = SERVER + "/wiki/" + id;
+                        self._selectedEntity.pageid = null;
+                    }
+
+                    self._trigger('selected', null, [id]);
+                }
+            }
+        });
+    } else {
+        this._trigger('selected', null, [id]);
+    }
+};
+
+
+// overwrite search result default behaviour sec
+$.wikibase.entitysearch.prototype._initMenu = function (ooMenu) {
+    var PARENT = $.wikibase.entityselector;
+    PARENT.prototype._initMenu.apply(this, arguments);
+
+    if (this.options.suggestionsPlaceholder) {
+        ooMenu.option('customItems').unshift(this.options.suggestionsPlaceholder);
+    }
+
+    ooMenu.element.addClass('wikibase-entitysearch-list');
+
+    $(ooMenu)
+        .off('selected')
+        .on('selected.entitysearch', function (event, item) {
+            if (event.originalEvent
+                // && /^key/.test( event.originalEvent.type )
+                && !(item instanceof $.ui.ooMenu.CustomItem)
+            ) {
+                var itemEntityStub = item.getEntityStub();
+                if (itemEntityStub) {
+                    if (itemEntityStub.repository.toLowerCase() === "wikidata") {
+
+                        $("a[tabindex='-1']").click(function (e) {
+                            e.preventDefault();
+                        });
+
+                        //api call
+                        var full_endpoint = WIKIBASE_SYNC_URL + '/import-wikidata-item?q_id=' + itemEntityStub.id + "&api_key=" + API_KEY;
+                        $.ajax({
+                            url: full_endpoint,
+                            crossDomain: true,
+                            //async: false,
+                            //global: false,
+                            headers: {
+                                "Access-Control-Allow-Origin": "*",
+                                "Access-Control-Request-Headers3": "x-requested-with"
+                            },
+                            success: function (data) {
+                                console.log("response: ", data);
+                                //window.history.back();
+                                window.location.replace(SERVER + '/wiki/item:' + data.pid);
+
+                            }
+                        });
+                    } else {
+                        window.location.href = item.getEntityStub().url;
+                    }
+                }
+            }
+        });
+
+    return ooMenu;
+}
+
+function createSyncButtons(_context) {
+    btn = $("")
+    btn2 = $("")
+    //btn.css("margin-top", ".5rem");
+    btn.css("display", "block");
+    //btn.css("margin-left", "5px");
+    btn.css("color", "#0645ad");
+    btn.css("background-color", "white");
+    btn.css("border-color", "#0645ad");
+    btn.css("border-radius", "5px");
+
+    //btn2.css("margin-left", "5px");
+    btn2.css("margin-top", ".3rem");
+    btn2.css("display", "block");
+    btn2.css("color", "#0645ad");
+    btn2.css("background-color", "white");
+    btn2.css("border-color", "#0645ad");
+    btn2.css("border-radius", "5px");
+
+    _context.$propertyLabel.append(btn);
+    _context.$propertyLabel.append(btn2);
+
+    var _wikibasePropertyValue = _context.value().getItemContainer()._items[0]._claim._mainSnak._value._value;
+    self.wikibasePropertyValue = _wikibasePropertyValue;
+
+    //console.log(_context.wikibasePropertyKey,_context.wikibasePropertyValue);
+
+    btn.on('click', function () {
btn.text("syncing..."); + //api call + full_endpoint = WIKIBASE_SYNC_URL + '/sync?q_id=' + self.wikibasePropertyValue + "&api_key=" + API_KEY; + $.ajax({ + url: full_endpoint, + crossDomain: true, + headers: { + // "accept": "application/json", + "Access-Control-Allow-Origin": "*", + "Access-Control-Request-Headers3": "x-requested-with" + }, + success: function (data) { + if (data) { + location.reload(); + } + + + } + }); + }); + + btn2.on('click', function () { + btn2.text("syncing..."); + //api call + var full_endpoint = WIKIBASE_SYNC_URL + '/import-wikidata-item?q_id=' + self.wikibasePropertyValue + "&api_key=" + API_KEY; + $.ajax({ + url: full_endpoint, + crossDomain: true, + headers: { + // "accept": "application/json", + "Access-Control-Allow-Origin": "*", + "Access-Control-Request-Headers3": "x-requested-with" + }, + success: function (data) { + if (data) { + //console.log(data); + location.reload(); + } + + + } + }); + }); +}; + +function createSyncButton(_context) { + btn = $("") + btn.css("display", "block"); + btn.css("color", "#0645ad"); + btn.css("background-color", "white"); + btn.css("border-color", "#0645ad"); + btn.css("border-radius", "5px"); + + _context.$propertyLabel.append(btn); + + var _wikibasePropertyValue = _context.value().getItemContainer()._items[0]._claim._mainSnak._value._value; + self.wikibasePropertyValue = _wikibasePropertyValue; + + btn.on('click', function () { + btn.text("syncing..."); + btn.prop("disabled", true); + //disable other sync button + $('#wbsynclabel').prop("disabled", true); + //api call + full_endpoint = WIKIBASE_SYNC_URL + '/sync?q_id=' + self.wikibasePropertyValue + "&api_key=" + API_KEY; + $.ajax({ + url: full_endpoint, + crossDomain: true, + headers: { + // "accept": "application/json", + "Access-Control-Allow-Origin": "*", + "Access-Control-Request-Headers3": "x-requested-with" + }, + success: function (data) { + if (data) { + console.log(data); + location.reload(); + } + + + } + }); + }); +}; + +function createSyncLabelButton(_context) { + btn2 = $("") + //btn2.css("margin-left", "5px"); + btn2.css("margin-top", ".3rem"); + btn2.css("display", "block"); + btn2.css("color", "#0645ad"); + btn2.css("background-color", "white"); + btn2.css("border-color", "#0645ad"); + btn2.css("border-radius", "5px"); + + _context.$propertyLabel.append(btn2); + + var _wikibasePropertyValue = _context.value().getItemContainer()._items[0]._claim._mainSnak._value._value; + self.wikibasePropertyValue = _wikibasePropertyValue; + + //console.log(_context.wikibasePropertyKey,_context.wikibasePropertyValue); + + btn2.on('click', function () { + btn2.text("syncing..."); + btn2.prop("disabled", true); + //disable other sync button + $('#wbsync').prop("disabled", true); + //api call + var full_endpoint = WIKIBASE_SYNC_URL + '/import-wikidata-item?q_id=' + self.wikibasePropertyValue + "&api_key=" + API_KEY; + $.ajax({ + url: full_endpoint, + crossDomain: true, + headers: { + // "accept": "application/json", + "Access-Control-Allow-Origin": "*", + "Access-Control-Request-Headers3": "x-requested-with" + }, + success: function (data) { + if (data) { + console.log(data); + location.reload(); + } + + + } + }); + }); +}; + + +function removeExistingRecordsFromWikidataResults(wikidataResults, localResults) { + if (!wikidataResults || !localResults) { + return wikidataResults; + } + var updatedResults = []; + localResults.concat(wikidataResults).forEach(function (element) { + var index = updatedResults.findIndex(function (x) { + return x.label.toLowerCase().trim() == 
+        if (index == -1) {
+            if (element.repository.toLowerCase() !== "local") {
+                element.label = "[clone from wikidata:] " + element.label
+            }
+            updatedResults.push(element);
+        }
+    })
+    return updatedResults
+
+};
+
diff --git a/wikibasesync-extension/src/Hooks.php b/wikibasesync-extension/src/Hooks.php
new file mode 100644
index 0000000..3aa595b
--- /dev/null
+++ b/wikibasesync-extension/src/Hooks.php
@@ -0,0 +1,52 @@
+<?php
+
+namespace MediaWiki\Extension\WikibaseSync;
+
+use Config;
+
+class Hooks {
+
+	public function onBeforePageDisplay( $out, $skin ): void {
+		$user = $out->getUser();
+		if(!empty($user) && $user->getId() != 0){
+			//var_dump(gettype($user));
+			//var_dump($user->getId());
+			$out->addModules( 'ext.wikibaseSync' );
+		}
+		$config = $out->getConfig();
+	}
+
+	public function onResourceLoaderGetConfigVars( array &$vars, $string, Config $config ): void {
+		$vars['wgVisualEditor'] =[
+			"wikibasesync_server_url" => $config->get( 'WikibaseSyncUrl' ),
+			"api_key" => $config->get('ApiKey'),
+			"PID" => $config->get( 'PID' ),
+			"QID" => $config->get( 'QID' ),
+			"Server" => $config->get( 'Server' ),
+		];
+	}
+}