From 5b2d880b89e39774cb1fb7641290ac0f37d7dbae Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Thu, 12 Aug 2021 20:20:58 +0000 Subject: [PATCH 01/10] Rename all elastic/elasticsearch/kibana references and clean up any remaining x-pack resources Signed-off-by: Bishoy Boktor --- .ci/run-opensearch.sh | 113 + .dockerignore | 2 +- .gitignore | 4 +- .npmignore | 6 +- README.md | 2 +- api/index.js | 84 +- api/kibana.d.ts | 272 - api/new.d.ts | 2 +- api/requestParams.d.ts | 6 +- api/types.d.ts | 38 +- api/utils.js | 2 +- docs/examples/asStream.asciidoc | 6 +- docs/examples/bulk.asciidoc | 2 +- docs/examples/exists.asciidoc | 2 +- docs/examples/get.asciidoc | 2 +- docs/examples/ignore.asciidoc | 2 +- docs/examples/msearch.asciidoc | 2 +- docs/examples/proxy/README.md | 25 +- docs/examples/proxy/api/autocomplete.js | 6 +- docs/examples/proxy/api/delete.js | 4 +- docs/examples/proxy/api/index.js | 4 +- docs/examples/proxy/api/search.js | 6 +- docs/examples/proxy/package.json | 2 +- .../proxy/utils/prepare-elasticsearch.js | 7 +- docs/examples/reindex.asciidoc | 2 +- docs/examples/scroll.asciidoc | 6 +- docs/examples/search.asciidoc | 6 +- docs/examples/sql.asciidoc | 64 - docs/examples/sql.query.asciidoc | 64 - docs/examples/suggest.asciidoc | 2 +- docs/examples/transport.request.asciidoc | 6 +- docs/examples/typescript.asciidoc | 2 +- docs/examples/update-by-query.asciidoc | 2 +- docs/examples/update.asciidoc | 4 +- docs/examples/update_by_query.asciidoc | 2 +- index.d.ts | 4 +- index.js | 29 +- lib/Connection.d.ts | 2 +- lib/Connection.js | 12 +- lib/Helpers.d.ts | 6 +- lib/Helpers.js | 12 +- lib/Serializer.d.ts | 2 +- lib/Serializer.js | 4 +- lib/Transport.d.ts | 14 +- lib/Transport.js | 46 +- lib/errors.d.ts | 20 +- lib/errors.js | 26 +- lib/pool/BaseConnectionPool.js | 12 +- lib/pool/ConnectionPool.js | 8 +- package.json | 9 +- scripts/download-artifacts.js | 24 +- scripts/es-docker.sh | 38 - scripts/kibana-docker.sh | 8 - scripts/release-canary.js | 6 +- 
.../{clone-es.js => clone-opensearch.js} | 36 +- scripts/utils/generateDocs.js | 10 +- scripts/utils/generateMain.js | 22 +- scripts/utils/index.js | 2 +- scripts/wait-cluster.sh | 4 +- test/acceptance/events-order.test.js | 2 +- test/acceptance/observability.test.js | 12 +- test/acceptance/product-check.test.js | 32 +- test/acceptance/resurrect.test.js | 6 +- test/benchmarks/macro/complex.bench.js | 2 +- test/benchmarks/macro/simple.bench.js | 2 +- test/benchmarks/suite.js | 16 +- test/fixtures/stackoverflow.ndjson | 68 +- test/integration/README.md | 16 +- test/integration/index.js | 8 +- test/integration/test-runner.js | 10 +- test/types/client-options.test-d.ts | 24 +- test/types/connection-pool.test-d.ts | 8 +- test/types/connection.test-d.ts | 2 +- test/types/errors.test-d.ts | 2 +- test/types/helpers.test-d.ts | 38 +- test/types/kibana.test-d.ts | 127 - test/types/new-types.test-d.ts | 20 +- test/types/transport.test-d.ts | 2 +- test/unit/client.test.js | 44 +- test/unit/connection-pool.test.js | 8 +- test/unit/connection.test.js | 2 +- test/unit/errors.test.js | 20 +- test/unit/esm/index.mjs | 2 +- test/unit/events.test.js | 10 +- test/unit/transport.test.js | 140 +- test/utils/buildCluster.js | 2 +- test/utils/buildServer.js | 2 +- test/utils/index.js | 2 +- yarn.lock | 4519 +++++++++++++++++ 89 files changed, 5158 insertions(+), 1107 deletions(-) create mode 100755 .ci/run-opensearch.sh delete mode 100644 api/kibana.d.ts delete mode 100644 docs/examples/sql.asciidoc delete mode 100644 docs/examples/sql.query.asciidoc delete mode 100755 scripts/es-docker.sh delete mode 100755 scripts/kibana-docker.sh rename scripts/utils/{clone-es.js => clone-opensearch.js} (79%) delete mode 100644 test/types/kibana.test-d.ts create mode 100644 yarn.lock diff --git a/.ci/run-opensearch.sh b/.ci/run-opensearch.sh new file mode 100755 index 000000000..7b33b7aab --- /dev/null +++ b/.ci/run-opensearch.sh @@ -0,0 +1,113 @@ +#!/usr/bin/env bash +# +# Launch one or more 
OpenSearch nodes via the Docker image, +# to form a cluster suitable for running the REST API tests. +# +# Export the STACK_VERSION variable, eg. '8.0.0-SNAPSHOT'. +# Export the TEST_SUITE variable. +# Export the NUMBER_OF_NODES variable to start more than 1 node +# Version 1.4.0 +# - Initial version of the run-opensearch.sh script +# - Deleting the volume should not depend on the container still running +# - Fixed `ES_JAVA_OPTS` config +# - Moved to STACK_VERSION and TEST_VERSION +# - Refactored into functions and imports +# - Support NUMBER_OF_NODES +# - Added 5 retries on docker pull for fixing transient network errors +# - Added flags to make local CCR configurations work +# - Added action.destructive_requires_name=false as the default will be true in v8 +# - Added ingest.geoip.downloader.enabled=false as it causes false positives in testing +script_path=$(dirname $(realpath -s $0)) +source $script_path/functions/imports.sh +set -euo pipefail +echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on separate terminals \033[0m" +cleanup_node $os_node_name +master_node_name=${os_node_name} +cluster_name=${moniker}${suffix} +declare -a volumes +environment=($(cat <<-END + --env node.name=$os_node_name + --env cluster.name=$cluster_name + --env cluster.initial_master_nodes=$master_node_name + --env discovery.seed_hosts=$master_node_name + --env cluster.routing.allocation.disk.threshold_enabled=false + --env bootstrap.memory_lock=true + --env node.attr.testattr=test + --env path.repo=/tmp + --env repositories.url.allowed_urls=http://snapshot.test* + --env action.destructive_requires_name=false + --env ingest.geoip.downloader.enabled=false +END +)) + volumes+=($(cat <<-END + --volume $ssl_cert:/usr/share/opensearch/config/certs/testnode.crt + --volume $ssl_key:/usr/share/opensearch/config/certs/testnode.key + --volume $ssl_ca:/usr/share/opensearch/config/certs/ca.crt +END +)) +fi + 
+cert_validation_flags="" + +# Pull the container, retry on failures up to 5 times with +# short delays between each attempt. Fixes most transient network errors. +docker_pull_attempts=0 +until [ "$docker_pull_attempts" -ge 5 ] +do + docker pull docker.opensearch.co/opensearch/"$opensearch_container" && break + docker_pull_attempts=$((docker_pull_attempts+1)) + echo "Failed to pull image, retrying in 10 seconds (retry $docker_pull_attempts/5)..." + sleep 10 +done + +NUMBER_OF_NODES=${NUMBER_OF_NODES-1} +http_port=9200 +for (( i=0; i<$NUMBER_OF_NODES; i++, http_port++ )); do + node_name=${os_node_name}$i + node_url=${external_opensearch_url/9200/${http_port}}$i + if [[ "$i" == "0" ]]; then node_name=$os_node_name; fi + environment+=($(cat <<-END + --env node.name=$node_name +END +)) + echo "$i: $http_port $node_url " + volume_name=${node_name}-${suffix}-data + volumes+=($(cat <<-END + --volume $volume_name:/usr/share/opensearch/data${i} +END +)) + + # make sure we detach for all but the last node if DETACH=false (default) so all nodes are started + local_detach="true" + if [[ "$i" == "$((NUMBER_OF_NODES-1))" ]]; then local_detach=$DETACH; fi + echo -e "\033[34;1mINFO:\033[0m Starting container $node_name \033[0m" + set -x + docker run \ + --name "$node_name" \ + --network "$network_name" \ + --env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \ + "${environment[@]}" \ + "${volumes[@]}" \ + --publish "$http_port":9200 \ + --ulimit nofile=65536:65536 \ + --ulimit memlock=-1:-1 \ + --detach="$local_detach" \ + --health-cmd="curl $cert_validation_flags --fail $opensearch_url/_cluster/health || exit 1" \ + --health-interval=2s \ + --health-retries=20 \ + --health-timeout=2s \ + --rm \ + docker.opensearch.co/opensearch/"$opensearch_container"; + + set +x + if wait_for_container "$os_node_name" "$network_name"; then + echo -e "\033[32;1mSUCCESS:\033[0m Running on: $node_url\033[0m" + fi + +done + diff --git a/.dockerignore b/.dockerignore index 54eb2a95a..a7788423d 100644 --- 
a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,5 @@ node_modules npm-debug.log test/benchmarks -elasticsearch +opensearch .git diff --git a/.gitignore b/.gitignore index 6de1b46bc..9e95a838b 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,8 @@ jspm_packages package-lock.json -# elasticsearch repo or binary files -elasticsearch* +# opensearch repo or binary files +opensearch* test/benchmarks/macro/fixtures/* diff --git a/.npmignore b/.npmignore index 9ef569a66..54ae7742a 100644 --- a/.npmignore +++ b/.npmignore @@ -47,8 +47,8 @@ jspm_packages package-lock.json -# elasticsearch repo or binary files -elasticsearch* +# opensearch repo or binary files +opensearch* # Generated typings, we don't commit them # because we should copy them in the main .d.ts file @@ -72,5 +72,5 @@ CODE_OF_CONDUCT.md CONTRIBUTING.md # CANARY-PACKAGE -api/kibana.d.ts +api/opensearch_dashboards.d.ts # /CANARY-PACKAGE diff --git a/README.md b/README.md index 30fcec3fa..0b0409230 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ OpenSearch Node.js client ## Welcome! -**opensearch-js** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of elasticsearch-js licensed under the [Apache v2.0 License](LICENSE.txt). For more information, see [opensearch.org](https://opensearch.org/). +**opensearch-js** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of elasticsearch-js licensed under the [Apache v2.0 License](LICENSE.txt). For more information, see [opensearch.org](https://opensearch.org/). 
## Project Resources diff --git a/api/index.js b/api/index.js index 47fa621c4..0f09e605a 100644 --- a/api/index.js +++ b/api/index.js @@ -92,7 +92,7 @@ const kShutdown = Symbol('Shutdown') const kSnapshot = Symbol('Snapshot') const kTasks = Symbol('Tasks') -function ESAPI (opts) { +function OSAPI (opts) { this[kConfigurationError] = opts.ConfigurationError this[kCat] = null this[kCluster] = null @@ -106,47 +106,47 @@ function ESAPI (opts) { this[kTasks] = null } -ESAPI.prototype.bulk = bulkApi -ESAPI.prototype.clearScroll = clearScrollApi -ESAPI.prototype.count = countApi -ESAPI.prototype.create = createApi -ESAPI.prototype.delete = deleteApi -ESAPI.prototype.deleteByQuery = deleteByQueryApi -ESAPI.prototype.deleteByQueryRethrottle = deleteByQueryRethrottleApi -ESAPI.prototype.deleteScript = deleteScriptApi -ESAPI.prototype.exists = existsApi -ESAPI.prototype.existsSource = existsSourceApi -ESAPI.prototype.explain = explainApi -ESAPI.prototype.fieldCaps = fieldCapsApi -ESAPI.prototype.get = getApi -ESAPI.prototype.getScript = getScriptApi -ESAPI.prototype.getScriptContext = getScriptContextApi -ESAPI.prototype.getScriptLanguages = getScriptLanguagesApi -ESAPI.prototype.getSource = getSourceApi -ESAPI.prototype.index = indexApi -ESAPI.prototype.info = infoApi -ESAPI.prototype.mget = mgetApi -ESAPI.prototype.msearch = msearchApi -ESAPI.prototype.msearchTemplate = msearchTemplateApi -ESAPI.prototype.mtermvectors = mtermvectorsApi -ESAPI.prototype.ping = pingApi -ESAPI.prototype.putScript = putScriptApi -ESAPI.prototype.rankEval = rankEvalApi -ESAPI.prototype.reindex = reindexApi -ESAPI.prototype.reindexRethrottle = reindexRethrottleApi -ESAPI.prototype.renderSearchTemplate = renderSearchTemplateApi -ESAPI.prototype.scriptsPainlessExecute = scriptsPainlessExecuteApi -ESAPI.prototype.scroll = scrollApi -ESAPI.prototype.search = searchApi -ESAPI.prototype.searchShards = searchShardsApi -ESAPI.prototype.searchTemplate = searchTemplateApi -ESAPI.prototype.termsEnum = 
termsEnumApi -ESAPI.prototype.termvectors = termvectorsApi -ESAPI.prototype.update = updateApi -ESAPI.prototype.updateByQuery = updateByQueryApi -ESAPI.prototype.updateByQueryRethrottle = updateByQueryRethrottleApi +OSAPI.prototype.bulk = bulkApi +OSAPI.prototype.clearScroll = clearScrollApi +OSAPI.prototype.count = countApi +OSAPI.prototype.create = createApi +OSAPI.prototype.delete = deleteApi +OSAPI.prototype.deleteByQuery = deleteByQueryApi +OSAPI.prototype.deleteByQueryRethrottle = deleteByQueryRethrottleApi +OSAPI.prototype.deleteScript = deleteScriptApi +OSAPI.prototype.exists = existsApi +OSAPI.prototype.existsSource = existsSourceApi +OSAPI.prototype.explain = explainApi +OSAPI.prototype.fieldCaps = fieldCapsApi +OSAPI.prototype.get = getApi +OSAPI.prototype.getScript = getScriptApi +OSAPI.prototype.getScriptContext = getScriptContextApi +OSAPI.prototype.getScriptLanguages = getScriptLanguagesApi +OSAPI.prototype.getSource = getSourceApi +OSAPI.prototype.index = indexApi +OSAPI.prototype.info = infoApi +OSAPI.prototype.mget = mgetApi +OSAPI.prototype.msearch = msearchApi +OSAPI.prototype.msearchTemplate = msearchTemplateApi +OSAPI.prototype.mtermvectors = mtermvectorsApi +OSAPI.prototype.ping = pingApi +OSAPI.prototype.putScript = putScriptApi +OSAPI.prototype.rankEval = rankEvalApi +OSAPI.prototype.reindex = reindexApi +OSAPI.prototype.reindexRethrottle = reindexRethrottleApi +OSAPI.prototype.renderSearchTemplate = renderSearchTemplateApi +OSAPI.prototype.scriptsPainlessExecute = scriptsPainlessExecuteApi +OSAPI.prototype.scroll = scrollApi +OSAPI.prototype.search = searchApi +OSAPI.prototype.searchShards = searchShardsApi +OSAPI.prototype.searchTemplate = searchTemplateApi +OSAPI.prototype.termsEnum = termsEnumApi +OSAPI.prototype.termvectors = termvectorsApi +OSAPI.prototype.update = updateApi +OSAPI.prototype.updateByQuery = updateByQueryApi +OSAPI.prototype.updateByQueryRethrottle = updateByQueryRethrottleApi -Object.defineProperties(ESAPI.prototype, 
{ +Object.defineProperties(OSAPI.prototype, { cat: { get () { if (this[kCat] === null) { @@ -251,4 +251,4 @@ Object.defineProperties(ESAPI.prototype, { update_by_query_rethrottle: { get () { return this.updateByQueryRethrottle } } }) -module.exports = ESAPI +module.exports = OSAPI diff --git a/api/kibana.d.ts b/api/kibana.d.ts deleted file mode 100644 index 6e5877203..000000000 --- a/api/kibana.d.ts +++ /dev/null @@ -1,272 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/// - -import { - ClientOptions, - ConnectionPool, - Serializer, - Transport, - errors, - RequestEvent, - ResurrectEvent, - ApiError -} from '../index' -import Helpers from '../lib/Helpers' -import { - ApiResponse, - TransportRequestPromise, - TransportRequestParams, - TransportRequestOptions -} from '../lib/Transport' -import * as T from './types' - -/** - * We are still working on this type, it will arrive soon. 
- * If it's critical for you, please open an issue. - * https://github.com/opensearch-project/opensearch-js - */ -type TODO = Record - -// Extend API -interface ClientExtendsCallbackOptions { - ConfigurationError: errors.ConfigurationError, - makeRequest(params: TransportRequestParams, options?: TransportRequestOptions): Promise | void; - result: { - body: null, - statusCode: null, - headers: null, - warnings: null - } -} -declare type extendsCallback = (options: ClientExtendsCallbackOptions) => any; -// /Extend API - -interface KibanaClient { - connectionPool: ConnectionPool - transport: Transport - serializer: Serializer - extend(method: string, fn: extendsCallback): void - extend(method: string, opts: { force: boolean }, fn: extendsCallback): void; - helpers: Helpers - child(opts?: ClientOptions): KibanaClient - close(): Promise; - emit(event: string | symbol, ...args: any[]): boolean; - on(event: 'request', listener: (err: ApiError, meta: RequestEvent) => void): this; - on(event: 'response', listener: (err: ApiError, meta: RequestEvent) => void): this; - on(event: 'sniff', listener: (err: ApiError, meta: RequestEvent) => void): this; - on(event: 'resurrect', listener: (err: null, meta: ResurrectEvent) => void): this; - once(event: 'request', listener: (err: ApiError, meta: RequestEvent) => void): this; - once(event: 'response', listener: (err: ApiError, meta: RequestEvent) => void): this; - once(event: 'sniff', listener: (err: ApiError, meta: RequestEvent) => void): this; - once(event: 'resurrect', listener: (err: null, meta: ResurrectEvent) => void): this; - off(event: string | symbol, listener: (...args: any[]) => void): this; - bulk(params: T.BulkRequest, options?: TransportRequestOptions): TransportRequestPromise> - cat: { - aliases(params?: T.CatAliasesRequest, options?: TransportRequestOptions): TransportRequestPromise> - allocation(params?: T.CatAllocationRequest, options?: TransportRequestOptions): TransportRequestPromise> - count(params?: 
T.CatCountRequest, options?: TransportRequestOptions): TransportRequestPromise> - fielddata(params?: T.CatFielddataRequest, options?: TransportRequestOptions): TransportRequestPromise> - health(params?: T.CatHealthRequest, options?: TransportRequestOptions): TransportRequestPromise> - help(params?: T.CatHelpRequest, options?: TransportRequestOptions): TransportRequestPromise> - indices(params?: T.CatIndicesRequest, options?: TransportRequestOptions): TransportRequestPromise> - master(params?: T.CatMasterRequest, options?: TransportRequestOptions): TransportRequestPromise> - nodeattrs(params?: T.CatNodeAttributesRequest, options?: TransportRequestOptions): TransportRequestPromise> - nodes(params?: T.CatNodesRequest, options?: TransportRequestOptions): TransportRequestPromise> - pendingTasks(params?: T.CatPendingTasksRequest, options?: TransportRequestOptions): TransportRequestPromise> - plugins(params?: T.CatPluginsRequest, options?: TransportRequestOptions): TransportRequestPromise> - recovery(params?: T.CatRecoveryRequest, options?: TransportRequestOptions): TransportRequestPromise> - repositories(params?: T.CatRepositoriesRequest, options?: TransportRequestOptions): TransportRequestPromise> - segments(params?: T.CatSegmentsRequest, options?: TransportRequestOptions): TransportRequestPromise> - shards(params?: T.CatShardsRequest, options?: TransportRequestOptions): TransportRequestPromise> - snapshots(params?: T.CatSnapshotsRequest, options?: TransportRequestOptions): TransportRequestPromise> - tasks(params?: T.CatTasksRequest, options?: TransportRequestOptions): TransportRequestPromise> - templates(params?: T.CatTemplatesRequest, options?: TransportRequestOptions): TransportRequestPromise> - threadPool(params?: T.CatThreadPoolRequest, options?: TransportRequestOptions): TransportRequestPromise> - } - clearScroll(params?: T.ClearScrollRequest, options?: TransportRequestOptions): TransportRequestPromise> - cluster: { - allocationExplain(params?: 
T.ClusterAllocationExplainRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteComponentTemplate(params: T.ClusterDeleteComponentTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteVotingConfigExclusions(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - existsComponentTemplate(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - getComponentTemplate(params?: T.ClusterGetComponentTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - getSettings(params?: T.ClusterGetSettingsRequest, options?: TransportRequestOptions): TransportRequestPromise> - health(params?: T.ClusterHealthRequest, options?: TransportRequestOptions): TransportRequestPromise> - pendingTasks(params?: T.ClusterPendingTasksRequest, options?: TransportRequestOptions): TransportRequestPromise> - postVotingConfigExclusions(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - putComponentTemplate(params: T.ClusterPutComponentTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - putSettings(params?: T.ClusterPutSettingsRequest, options?: TransportRequestOptions): TransportRequestPromise> - remoteInfo(params?: T.ClusterRemoteInfoRequest, options?: TransportRequestOptions): TransportRequestPromise> - reroute(params?: T.ClusterRerouteRequest, options?: TransportRequestOptions): TransportRequestPromise> - state(params?: T.ClusterStateRequest, options?: TransportRequestOptions): TransportRequestPromise> - stats(params?: T.ClusterStatsRequest, options?: TransportRequestOptions): TransportRequestPromise> - } - count(params?: T.CountRequest, options?: TransportRequestOptions): TransportRequestPromise> - create(params: T.CreateRequest, options?: TransportRequestOptions): TransportRequestPromise> - danglingIndices: { - deleteDanglingIndex(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - 
importDanglingIndex(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - listDanglingIndices(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - } - delete(params: T.DeleteRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteByQuery(params: T.DeleteByQueryRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteByQueryRethrottle(params: T.DeleteByQueryRethrottleRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteScript(params: T.DeleteScriptRequest, options?: TransportRequestOptions): TransportRequestPromise> - exists(params: T.ExistsRequest, options?: TransportRequestOptions): TransportRequestPromise> - existsSource(params: T.ExistsSourceRequest, options?: TransportRequestOptions): TransportRequestPromise> - explain(params: T.ExplainRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - features: { - getFeatures(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - resetFeatures(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - } - fieldCaps(params?: T.FieldCapsRequest, options?: TransportRequestOptions): TransportRequestPromise> - get(params: T.GetRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - getScript(params: T.GetScriptRequest, options?: TransportRequestOptions): TransportRequestPromise> - getScriptContext(params?: T.GetScriptContextRequest, options?: TransportRequestOptions): TransportRequestPromise> - getScriptLanguages(params?: T.GetScriptLanguagesRequest, options?: TransportRequestOptions): TransportRequestPromise> - getSource(params?: T.GetSourceRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - index(params: T.IndexRequest, options?: TransportRequestOptions): TransportRequestPromise> - indices: { - addBlock(params: T.IndicesAddBlockRequest, options?: TransportRequestOptions): 
TransportRequestPromise> - analyze(params?: T.IndicesAnalyzeRequest, options?: TransportRequestOptions): TransportRequestPromise> - clearCache(params?: T.IndicesClearCacheRequest, options?: TransportRequestOptions): TransportRequestPromise> - clone(params: T.IndicesCloneRequest, options?: TransportRequestOptions): TransportRequestPromise> - close(params: T.IndicesCloseRequest, options?: TransportRequestOptions): TransportRequestPromise> - create(params: T.IndicesCreateRequest, options?: TransportRequestOptions): TransportRequestPromise> - delete(params: T.IndicesDeleteRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteAlias(params: T.IndicesDeleteAliasRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteIndexTemplate(params: T.IndicesDeleteIndexTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteTemplate(params: T.IndicesDeleteTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - diskUsage(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - exists(params: T.IndicesExistsRequest, options?: TransportRequestOptions): TransportRequestPromise> - existsAlias(params: T.IndicesExistsAliasRequest, options?: TransportRequestOptions): TransportRequestPromise> - existsIndexTemplate(params: T.IndicesExistsIndexTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - existsTemplate(params: T.IndicesExistsTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - existsType(params: T.IndicesExistsTypeRequest, options?: TransportRequestOptions): TransportRequestPromise> - fieldUsageStats(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - flush(params?: T.IndicesFlushRequest, options?: TransportRequestOptions): TransportRequestPromise> - flushSynced(params?: T.IndicesFlushSyncedRequest, options?: TransportRequestOptions): TransportRequestPromise> - forcemerge(params?: 
T.IndicesForcemergeRequest, options?: TransportRequestOptions): TransportRequestPromise> - get(params: T.IndicesGetRequest, options?: TransportRequestOptions): TransportRequestPromise> - getAlias(params?: T.IndicesGetAliasRequest, options?: TransportRequestOptions): TransportRequestPromise> - getFieldMapping(params: T.IndicesGetFieldMappingRequest, options?: TransportRequestOptions): TransportRequestPromise> - getIndexTemplate(params?: T.IndicesGetIndexTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - getMapping(params?: T.IndicesGetMappingRequest, options?: TransportRequestOptions): TransportRequestPromise> - getSettings(params?: T.IndicesGetSettingsRequest, options?: TransportRequestOptions): TransportRequestPromise> - getTemplate(params?: T.IndicesGetTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - getUpgrade(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - open(params: T.IndicesOpenRequest, options?: TransportRequestOptions): TransportRequestPromise> - putAlias(params: T.IndicesPutAliasRequest, options?: TransportRequestOptions): TransportRequestPromise> - putIndexTemplate(params: T.IndicesPutIndexTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - putMapping(params?: T.IndicesPutMappingRequest, options?: TransportRequestOptions): TransportRequestPromise> - putSettings(params?: T.IndicesPutSettingsRequest, options?: TransportRequestOptions): TransportRequestPromise> - putTemplate(params: T.IndicesPutTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - recovery(params?: T.IndicesRecoveryRequest, options?: TransportRequestOptions): TransportRequestPromise> - refresh(params?: T.IndicesRefreshRequest, options?: TransportRequestOptions): TransportRequestPromise> - resolveIndex(params: T.IndicesResolveIndexRequest, options?: TransportRequestOptions): TransportRequestPromise> - rollover(params: T.IndicesRolloverRequest, 
options?: TransportRequestOptions): TransportRequestPromise> - segments(params?: T.IndicesSegmentsRequest, options?: TransportRequestOptions): TransportRequestPromise> - shardStores(params?: T.IndicesShardStoresRequest, options?: TransportRequestOptions): TransportRequestPromise> - shrink(params: T.IndicesShrinkRequest, options?: TransportRequestOptions): TransportRequestPromise> - simulateIndexTemplate(params?: T.IndicesSimulateIndexTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - simulateTemplate(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - split(params: T.IndicesSplitRequest, options?: TransportRequestOptions): TransportRequestPromise> - stats(params?: T.IndicesStatsRequest, options?: TransportRequestOptions): TransportRequestPromise> - updateAliases(params?: T.IndicesUpdateAliasesRequest, options?: TransportRequestOptions): TransportRequestPromise> - upgrade(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - validateQuery(params?: T.IndicesValidateQueryRequest, options?: TransportRequestOptions): TransportRequestPromise> - } - info(params?: T.InfoRequest, options?: TransportRequestOptions): TransportRequestPromise> - ingest: { - deletePipeline(params: T.IngestDeletePipelineRequest, options?: TransportRequestOptions): TransportRequestPromise> - geoIpStats(params?: T.IngestGeoIpStatsRequest, options?: TransportRequestOptions): TransportRequestPromise> - getPipeline(params?: T.IngestGetPipelineRequest, options?: TransportRequestOptions): TransportRequestPromise> - processorGrok(params?: T.IngestProcessorGrokRequest, options?: TransportRequestOptions): TransportRequestPromise> - putPipeline(params: T.IngestPutPipelineRequest, options?: TransportRequestOptions): TransportRequestPromise> - simulate(params?: T.IngestSimulatePipelineRequest, options?: TransportRequestOptions): TransportRequestPromise> - } - mget(params?: T.MgetRequest, options?: TransportRequestOptions): 
TransportRequestPromise, TContext>> - msearch(params?: T.MsearchRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - msearchTemplate(params?: T.MsearchTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - mtermvectors(params?: T.MtermvectorsRequest, options?: TransportRequestOptions): TransportRequestPromise> - nodes: { - clearMeteringArchive(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - getMeteringInfo(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - hotThreads(params?: T.NodesHotThreadsRequest, options?: TransportRequestOptions): TransportRequestPromise> - info(params?: T.NodesInfoRequest, options?: TransportRequestOptions): TransportRequestPromise> - reloadSecureSettings(params?: T.NodesReloadSecureSettingsRequest, options?: TransportRequestOptions): TransportRequestPromise> - stats(params?: T.NodesStatsRequest, options?: TransportRequestOptions): TransportRequestPromise> - usage(params?: T.NodesUsageRequest, options?: TransportRequestOptions): TransportRequestPromise> - } - ping(params?: T.PingRequest, options?: TransportRequestOptions): TransportRequestPromise> - putScript(params: T.PutScriptRequest, options?: TransportRequestOptions): TransportRequestPromise> - rankEval(params: T.RankEvalRequest, options?: TransportRequestOptions): TransportRequestPromise> - reindex(params?: T.ReindexRequest, options?: TransportRequestOptions): TransportRequestPromise> - reindexRethrottle(params: T.ReindexRethrottleRequest, options?: TransportRequestOptions): TransportRequestPromise> - renderSearchTemplate(params?: T.RenderSearchTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise> - scriptsPainlessExecute(params?: T.ScriptsPainlessExecuteRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - scroll(params?: T.ScrollRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - 
search(params?: T.SearchRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - searchShards(params?: T.SearchShardsRequest, options?: TransportRequestOptions): TransportRequestPromise> - searchTemplate(params?: T.SearchTemplateRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - shutdown: { - deleteNode(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - getNode(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - putNode(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - } - snapshot: { - cleanupRepository(params: T.SnapshotCleanupRepositoryRequest, options?: TransportRequestOptions): TransportRequestPromise> - clone(params: T.SnapshotCloneRequest, options?: TransportRequestOptions): TransportRequestPromise> - create(params: T.SnapshotCreateRequest, options?: TransportRequestOptions): TransportRequestPromise> - createRepository(params: T.SnapshotCreateRepositoryRequest, options?: TransportRequestOptions): TransportRequestPromise> - delete(params: T.SnapshotDeleteRequest, options?: TransportRequestOptions): TransportRequestPromise> - deleteRepository(params: T.SnapshotDeleteRepositoryRequest, options?: TransportRequestOptions): TransportRequestPromise> - get(params: T.SnapshotGetRequest, options?: TransportRequestOptions): TransportRequestPromise> - getRepository(params?: T.SnapshotGetRepositoryRequest, options?: TransportRequestOptions): TransportRequestPromise> - repositoryAnalyze(params?: TODO, options?: TransportRequestOptions): TransportRequestPromise> - restore(params: T.SnapshotRestoreRequest, options?: TransportRequestOptions): TransportRequestPromise> - status(params?: T.SnapshotStatusRequest, options?: TransportRequestOptions): TransportRequestPromise> - verifyRepository(params: T.SnapshotVerifyRepositoryRequest, options?: TransportRequestOptions): TransportRequestPromise> - } - tasks: { - cancel(params?: 
T.TaskCancelRequest, options?: TransportRequestOptions): TransportRequestPromise> - get(params: T.TaskGetRequest, options?: TransportRequestOptions): TransportRequestPromise> - list(params?: T.TaskListRequest, options?: TransportRequestOptions): TransportRequestPromise> - } - termsEnum(params: T.TermsEnumRequest, options?: TransportRequestOptions): TransportRequestPromise> - termvectors(params: T.TermvectorsRequest, options?: TransportRequestOptions): TransportRequestPromise> - update(params: T.UpdateRequest, options?: TransportRequestOptions): TransportRequestPromise, TContext>> - updateByQuery(params: T.UpdateByQueryRequest, options?: TransportRequestOptions): TransportRequestPromise> - updateByQueryRethrottle(params: T.UpdateByQueryRethrottleRequest, options?: TransportRequestOptions): TransportRequestPromise> -} - -export { KibanaClient } diff --git a/api/new.d.ts b/api/new.d.ts index 6c5bfc1d9..a4faab64c 100644 --- a/api/new.d.ts +++ b/api/new.d.ts @@ -695,7 +695,7 @@ declare class Client { updateByQueryRethrottle(params: T.UpdateByQueryRethrottleRequest, options: TransportRequestOptions, callback: callbackFn): TransportRequestCallback } -export * as estypes from './types' +export * as ostypes from './types' export { Client, Transport, diff --git a/api/requestParams.d.ts b/api/requestParams.d.ts index 6a22b0b1c..a761c8462 100644 --- a/api/requestParams.d.ts +++ b/api/requestParams.d.ts @@ -494,7 +494,7 @@ export interface DeleteByQuery extends Generic { scroll_size?: number; wait_for_completion?: boolean; requests_per_second?: number; - slices?: number|string; + slices?: number | string; body: T; } @@ -1252,7 +1252,7 @@ export interface Reindex extends Generic { wait_for_completion?: boolean; requests_per_second?: number; scroll?: string; - slices?: number|string; + slices?: number | string; max_docs?: number; body: T; } @@ -1572,7 +1572,7 @@ export interface UpdateByQuery extends Generic { scroll_size?: number; wait_for_completion?: boolean; 
requests_per_second?: number; - slices?: number|string; + slices?: number | string; body?: T; } diff --git a/api/types.d.ts b/api/types.d.ts index e7d3eee39..028e60bbc 100644 --- a/api/types.d.ts +++ b/api/types.d.ts @@ -504,7 +504,7 @@ export interface InfoResponse { cluster_name: Name cluster_uuid: Uuid name: Name - version: ElasticsearchVersionInfo + version: OpenSearchVersionInfo } export interface MgetHit { @@ -1141,7 +1141,7 @@ export interface SearchGeoDistanceSortKeys { unit?: DistanceUnit } export type SearchGeoDistanceSort = SearchGeoDistanceSortKeys | - { [property: string]: QueryDslGeoLocation | QueryDslGeoLocation[] } +{ [property: string]: QueryDslGeoLocation | QueryDslGeoLocation[] } export interface SearchHighlight { fields: Record @@ -1407,7 +1407,7 @@ export interface SearchSortContainerKeys { _script?: SearchScriptSort } export type SearchSortContainer = SearchSortContainerKeys | - { [property: string]: SearchFieldSort | SearchSortOrder } +{ [property: string]: SearchFieldSort | SearchSortOrder } export type SearchSortMode = 'min' | 'max' | 'sum' | 'avg' | 'median' @@ -1828,7 +1828,7 @@ export interface DocStats { deleted: long } -export interface ElasticsearchVersionInfo { +export interface OpenSearchVersionInfo { build_date: DateString build_hash: string build_snapshot: boolean @@ -2095,7 +2095,7 @@ export type PipelineName = string export interface PluginStats { classname: string description: string - elasticsearch_version: VersionString + opensearch_version: VersionString extended_plugins: string[] has_native_controller: boolean java_version: VersionString @@ -2561,7 +2561,7 @@ export interface AggregationsCompositeAggregationSource { export interface AggregationsCompositeBucketKeys { } export type AggregationsCompositeBucket = AggregationsCompositeBucketKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsCompositeBucketAggregate extends 
AggregationsMultiBucketAggregate> { after_key: Record @@ -2594,7 +2594,7 @@ export interface AggregationsDateHistogramBucketKeys { } export type AggregationsDateHistogramBucket = AggregationsDateHistogramBucketKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export type AggregationsDateInterval = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'quarter' | 'year' @@ -2668,7 +2668,7 @@ export interface AggregationsFiltersBucketItemKeys { doc_count: long } export type AggregationsFiltersBucketItem = AggregationsFiltersBucketItemKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsFormatMetricAggregationBase extends AggregationsMetricAggregationBase { format?: string @@ -2832,7 +2832,7 @@ export interface AggregationsIpRangeAggregationRange { export interface AggregationsIpRangeBucketKeys { } export type AggregationsIpRangeBucket = AggregationsIpRangeBucketKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsKeyedBucketKeys { doc_count: long @@ -2840,7 +2840,7 @@ export interface AggregationsKeyedBucketKeys { key_as_string: string } export type AggregationsKeyedBucket = AggregationsKeyedBucketKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsKeyedValueAggregate extends AggregationsValueAggregate { keys: string[] @@ -3004,7 +3004,7 @@ export interface AggregationsRangeAggregation extends AggregationsBucketAggregat export interface AggregationsRangeBucketKeys { } export type AggregationsRangeBucket = AggregationsRangeBucketKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsRareTermsAggregation extends AggregationsBucketAggregationBase { exclude?: string | string[] @@ -3019,7 +3019,7 @@ export 
interface AggregationsRareTermsAggregation extends AggregationsBucketAggr export interface AggregationsRareTermsBucketKeys { } export type AggregationsRareTermsBucket = AggregationsRareTermsBucketKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsRateAggregation extends AggregationsFormatMetricAggregationBase { unit?: AggregationsDateInterval @@ -3088,7 +3088,7 @@ export interface AggregationsSignificantTermsAggregation extends AggregationsBuc export interface AggregationsSignificantTermsBucketKeys { } export type AggregationsSignificantTermsBucket = AggregationsSignificantTermsBucketKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsSignificantTextAggregation extends AggregationsBucketAggregationBase { background_filter?: QueryDslQueryContainer @@ -3113,7 +3113,7 @@ export interface AggregationsSingleBucketAggregateKeys extends AggregationsAggre doc_count: double } export type AggregationsSingleBucketAggregate = AggregationsSingleBucketAggregateKeys | - { [property: string]: AggregationsAggregate } +{ [property: string]: AggregationsAggregate } export interface AggregationsStandardDeviationBounds { lower?: double @@ -4044,7 +4044,7 @@ export interface QueryDslConstantScoreQuery extends QueryDslQueryBase { export interface QueryDslDateDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslDateDecayFunction = QueryDslDateDecayFunctionKeys | - { [property: string]: QueryDslDecayPlacement } +{ [property: string]: QueryDslDecayPlacement } export type QueryDslDecayFunction = QueryDslDateDecayFunction | QueryDslNumericDecayFunction | QueryDslGeoDecayFunction @@ -4136,7 +4136,7 @@ export type QueryDslGeoCoordinate = string | double[] | QueryDslThreeDimensional export interface QueryDslGeoDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslGeoDecayFunction = 
QueryDslGeoDecayFunctionKeys | - { [property: string]: QueryDslDecayPlacement } +{ [property: string]: QueryDslDecayPlacement } export interface QueryDslGeoDistanceQueryKeys extends QueryDslQueryBase { distance?: Distance @@ -4144,7 +4144,7 @@ export interface QueryDslGeoDistanceQueryKeys extends QueryDslQueryBase { validation_method?: QueryDslGeoValidationMethod } export type QueryDslGeoDistanceQuery = QueryDslGeoDistanceQueryKeys | - { [property: string]: QueryDslGeoLocation } +{ [property: string]: QueryDslGeoLocation } export type QueryDslGeoExecution = 'memory' | 'indexed' @@ -4373,7 +4373,7 @@ export interface QueryDslNamedQueryKeys { ignore_unmapped?: boolean } export type QueryDslNamedQuery = QueryDslNamedQueryKeys | - { [property: string]: TQuery } +{ [property: string]: TQuery } export interface QueryDslNestedQuery extends QueryDslQueryBase { ignore_unmapped?: boolean @@ -4388,7 +4388,7 @@ export type QueryDslNestedScoreMode = 'avg' | 'sum' | 'min' | 'max' | 'none' export interface QueryDslNumericDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslNumericDecayFunction = QueryDslNumericDecayFunctionKeys | - { [property: string]: QueryDslDecayPlacement } +{ [property: string]: QueryDslDecayPlacement } export type QueryDslOperator = 'and' | 'or' | 'AND' | 'OR' diff --git a/api/utils.js b/api/utils.js index 2ed0fa64c..97d40c3e6 100644 --- a/api/utils.js +++ b/api/utils.js @@ -64,6 +64,6 @@ function normalizeArguments (params, options, callback) { return [params, options, callback] } -function noop () {} +function noop () { } module.exports = { handleError, snakeCaseKeys, normalizeArguments, noop, kConfigurationError } diff --git a/docs/examples/asStream.asciidoc b/docs/examples/asStream.asciidoc index a80fd549b..1333e4104 100644 --- a/docs/examples/asStream.asciidoc +++ b/docs/examples/asStream.asciidoc @@ -8,7 +8,7 @@ data. 
---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { @@ -77,14 +77,14 @@ async function run () { run().catch(console.log) ---- -TIP: This can be useful if you need to pipe the {es}'s response to a proxy, or +TIP: This can be useful if you need to pipe the {opensearch}'s response to a proxy, or send it directly to another source. [source,js] ---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) const fastify = require('fastify')() diff --git a/docs/examples/bulk.asciidoc b/docs/examples/bulk.asciidoc index 2f05b4cf1..d9198baca 100644 --- a/docs/examples/bulk.asciidoc +++ b/docs/examples/bulk.asciidoc @@ -11,7 +11,7 @@ NOTE: Did you know that we provide an helper for sending bulk request? You can f 'use strict' require('array.prototype.flatmap').shim() -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) diff --git a/docs/examples/exists.asciidoc b/docs/examples/exists.asciidoc index 851f8c471..54f65e47b 100644 --- a/docs/examples/exists.asciidoc +++ b/docs/examples/exists.asciidoc @@ -9,7 +9,7 @@ NOTE: Since this API uses the `HEAD` method, the body value will be boolean. 
--------- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/get.asciidoc b/docs/examples/get.asciidoc index 18caf1f4d..3ec2f2a3d 100644 --- a/docs/examples/get.asciidoc +++ b/docs/examples/get.asciidoc @@ -9,7 +9,7 @@ The following example gets a JSON document from an index called --------- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/ignore.asciidoc b/docs/examples/ignore.asciidoc index a46f3e708..1f334cf97 100644 --- a/docs/examples/ignore.asciidoc +++ b/docs/examples/ignore.asciidoc @@ -7,7 +7,7 @@ HTTP status codes which should not be considered errors for this request. ---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/msearch.asciidoc b/docs/examples/msearch.asciidoc index 3773318f4..54973cb64 100644 --- a/docs/examples/msearch.asciidoc +++ b/docs/examples/msearch.asciidoc @@ -8,7 +8,7 @@ API. 
---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/proxy/README.md b/docs/examples/proxy/README.md index 23e835aae..05cf534bd 100644 --- a/docs/examples/proxy/README.md +++ b/docs/examples/proxy/README.md @@ -1,7 +1,7 @@ -# Elasticsearch proxy example +# OpenSearch proxy example This folder contains an example of how to build a lightweight proxy -between your frontend code and Elasticsearch if you don't +between your frontend code and OpenSearch if you don't have a more sophisticated backend in place yet. > **IMPORTANT:** This is not a production ready code and it is only for demonstration purposes, @@ -25,16 +25,13 @@ In each endpoint you should configure the `INDEX` variable. ## How to use -Create an account on Vercel, then create a deployment on Elastic Cloud. If you -don't have an account on Elastic Cloud, you can create one with a free 14-day trial -of the [Elasticsearch Service](https://www.elastic.co/elasticsearch/service). +Create an account on Vercel, then create a deployment. -### Configure Elasticsearch +### Configure OpenSearch -Once you have created a deployment on Elastic Cloud copy the generated Cloud Id and the credentials. -Then open `utils/prepare-elasticsearch.js` and fill your credentials. The script generates -an [Api Key](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html) -that you can use for authenticating your request. Based on the configuration of the Api Key, you will be able +Once you have created a deployment on OpenSearch copy the generated Cloud Id and the credentials. +Then open `utils/prepare-opensearch.js` and fill your credentials. The script generates +an Api Key that you can use for authenticating your request.
Based on the configuration of the Api Key, you will be able to perform different operation on the specified indices or index pattern. ### Configure Vercel @@ -42,16 +39,10 @@ to perform different operation on the specified indices or index pattern. Install the [Vercel CLI](https://vercel.com/docs/cli) to bootstrap the project, or read the [quickstart](https://vercel.com/docs) documentation. -If you are using the CLI, bootstrap the project by running `vercel`. Test the project locally -with `vercel dev`, and deploy it with `vercel deploy`. -Configure the `ELASTIC_CLOUD_ID` [environment varible](https://vercel.com/docs/environment-variables) as well. -The Api Key is passed from the frontend app via a `Authorization` header as `Bearer` token and is -used to authorize the API calls to the endpoints as well. -Additional configuration, such as CORS, can be added to [`vercel.json`](https://vercel.com/docs/configuration). ## Authentication -If you are using Elasticsearch only for search purposes, such as a search box, you can create +If you are using OpenSearch only for search purposes, such as a search box, you can create an Api Key with `read` permissions and store it in your frontend app. Then you can send it via `Authorization` header to the proxy and run your searches.
diff --git a/docs/examples/proxy/api/autocomplete.js b/docs/examples/proxy/api/autocomplete.js index 5ebf47ff0..adeec17f6 100644 --- a/docs/examples/proxy/api/autocomplete.js +++ b/docs/examples/proxy/api/autocomplete.js @@ -35,13 +35,13 @@ 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const authorize = require('../utils/authorize') const INDEX = '' const client = new Client({ cloud: { - id: process.env.ELASTIC_CLOUD_ID + id: process.env.OPENSEARCH_CLOUD_ID } }) @@ -77,7 +77,7 @@ module.exports = async (req, res) => { const response = await client.search({ index: INDEX, // You could directly send from the browser - // the Elasticsearch's query DSL, but it will + // the OpenSearch's query DSL, but it will // expose you to the risk that a malicious user // could overload your cluster by crafting // expensive queries. diff --git a/docs/examples/proxy/api/delete.js b/docs/examples/proxy/api/delete.js index 4f5f72694..576f6ec3f 100644 --- a/docs/examples/proxy/api/delete.js +++ b/docs/examples/proxy/api/delete.js @@ -35,13 +35,13 @@ 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const authorize = require('../utils/authorize') const INDEX = '' const client = new Client({ cloud: { - id: process.env.ELASTIC_CLOUD_ID + id: process.env.OPENSEARCH_CLOUD_ID } }) diff --git a/docs/examples/proxy/api/index.js b/docs/examples/proxy/api/index.js index 2ccd50972..65498b8ae 100644 --- a/docs/examples/proxy/api/index.js +++ b/docs/examples/proxy/api/index.js @@ -35,13 +35,13 @@ 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const authorize = require('../utils/authorize') const INDEX = '' const client = new Client({ cloud: { - id: process.env.ELASTIC_CLOUD_ID + id: process.env.OPENSEARCH_CLOUD_ID } }) diff --git a/docs/examples/proxy/api/search.js 
b/docs/examples/proxy/api/search.js index b51f32113..71c42700d 100644 --- a/docs/examples/proxy/api/search.js +++ b/docs/examples/proxy/api/search.js @@ -35,13 +35,13 @@ 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const authorize = require('../utils/authorize') const INDEX = '' const client = new Client({ cloud: { - id: process.env.ELASTIC_CLOUD_ID + id: process.env.OPENSEARCH_CLOUD_ID } }) @@ -67,7 +67,7 @@ module.exports = async (req, res) => { const response = await client.search({ index: INDEX, // You could directly send from the browser - // the Elasticsearch's query DSL, but it will + // the OpenSearch's query DSL, but it will // expose you to the risk that a malicious user // could overload your cluster by crafting // expensive queries. diff --git a/docs/examples/proxy/package.json b/docs/examples/proxy/package.json index 34af9b020..83293c5d8 100644 --- a/docs/examples/proxy/package.json +++ b/docs/examples/proxy/package.json @@ -11,7 +11,7 @@ "author": "Tomas Della Vedova", "license": "Apache-2.0", "dependencies": { - "@elastic/elasticsearch": "^7.10.0" + "@opensearch/opensearch": "^7.10.0" }, "devDependencies": { "standard": "^16.0.3" diff --git a/docs/examples/proxy/utils/prepare-elasticsearch.js b/docs/examples/proxy/utils/prepare-elasticsearch.js index defb35bea..0e459fe16 100644 --- a/docs/examples/proxy/utils/prepare-elasticsearch.js +++ b/docs/examples/proxy/utils/prepare-elasticsearch.js @@ -30,7 +30,7 @@ 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') // Your Cloud Id const cloudId = '' @@ -40,7 +40,6 @@ const username = '' const password = '' // The indices or index patterns you will need to access const indexNames = ['my-index-name-or-pattern'] -// see https://www.elastic.co/guide/en/elasticsearch/reference/current/security-privileges.html#privileges-list-indices const privileges = ['read'] async 
function generateApiKeys (opts) { @@ -56,9 +55,9 @@ async function generateApiKeys (opts) { const { body } = await client.security.createApiKey({ body: { - name: 'elasticsearch-proxy', + name: 'opensearch-proxy', role_descriptors: { - 'elasticsearch-proxy-users': { + 'opensearch-proxy-users': { index: [{ names: indexNames, privileges diff --git a/docs/examples/reindex.asciidoc b/docs/examples/reindex.asciidoc index 8ed6f1800..f82451e82 100644 --- a/docs/examples/reindex.asciidoc +++ b/docs/examples/reindex.asciidoc @@ -9,7 +9,7 @@ In the following example we have a `game-of-thrones` index which contains differ ---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/scroll.asciidoc b/docs/examples/scroll.asciidoc index b7a9aa07b..c4298cc1d 100644 --- a/docs/examples/scroll.asciidoc +++ b/docs/examples/scroll.asciidoc @@ -16,7 +16,7 @@ in time. Subsequent changes to documents (index, update or delete) will only affect later search requests. In order to use scrolling, the initial search request should specify the scroll -parameter in the query string, which tells Elasticsearch how long it should keep +parameter in the query string, which tells OpenSearch how long it should keep the “search context” alive. NOTE: Did you know that we provide an helper for sending scroll requests? You can find it {jsclient}/client-helpers.html[here]. @@ -25,7 +25,7 @@ NOTE: Did you know that we provide an helper for sending scroll requests? You ca ---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { @@ -119,7 +119,7 @@ async iteration! 
---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) // Scroll utility diff --git a/docs/examples/search.asciidoc b/docs/examples/search.asciidoc index fb4fe4d2b..156fd1fdd 100644 --- a/docs/examples/search.asciidoc +++ b/docs/examples/search.asciidoc @@ -3,15 +3,15 @@ The `search` API allows you to execute a search query and get back search hits that match the query. The query can either be provided using a simple -https://www.elastic.co/guide/en/elasticsearch/reference/6.6/search-uri-request.html[query string as a parameter], +https://www.opensearch.org[query string as a parameter], or using a -https://www.elastic.co/guide/en/elasticsearch/reference/6.6/search-request-body.html[request body]. +https://www.opensearch.org[request body]. [source,js] ---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/sql.asciidoc b/docs/examples/sql.asciidoc deleted file mode 100644 index cccc641ae..000000000 --- a/docs/examples/sql.asciidoc +++ /dev/null @@ -1,64 +0,0 @@ -[[sql_examples]] -== SQL - -Elasticsearch SQL is an X-Pack component that allows SQL-like queries to be executed in real-time against Elasticsearch. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data natively inside Elasticsearch. One can think of Elasticsearch SQL as a translator, one that understands both SQL and Elasticsearch and makes it easy to read and process data in real-time, at scale by leveraging Elasticsearch capabilities. - -In the following example we will search all the documents that has the field `house` equals to `stark`, log the result with the tabular view and then manipulate the result to obtain an object easy to navigate. 
- -[source,js] ----- -'use strict' - -const { Client } = require('@elastic/elasticsearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.', - house: 'stark' - } - }) - - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Arya Stark', - quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.', - house: 'stark' - } - }) - - await client.index({ - index: 'game-of-thrones', - refresh: true, - body: { - character: 'Tyrion Lannister', - quote: 'A Lannister always pays his debts.', - house: 'lannister' - } - }) - - const { body } = await client.sql.query({ - body: { - query: "SELECT * FROM \"game-of-thrones\" WHERE house='stark'" - } - }) - - console.log(body) - - const data = body.rows.map(row => { - const obj = {} - for (var i = 0; i < row.length; i++) { - obj[body.columns[i].name] = row[i] - } - return obj - }) - - console.log(data) -} - -run().catch(console.log) ----- diff --git a/docs/examples/sql.query.asciidoc b/docs/examples/sql.query.asciidoc deleted file mode 100644 index 50fb32841..000000000 --- a/docs/examples/sql.query.asciidoc +++ /dev/null @@ -1,64 +0,0 @@ -[[sql_query_examples]] -=== SQL - -Elasticsearch SQL is an X-Pack component that allows SQL-like queries to be executed in real-time against Elasticsearch. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data natively inside Elasticsearch. One can think of Elasticsearch SQL as a translator, one that understands both SQL and Elasticsearch and makes it easy to read and process data in real-time, at scale by leveraging Elasticsearch capabilities. - -In the following example we will search all the documents that has the field `house` equals to `stark`, log the result with the tabular view and then manipulate the result to obtain an object easy to navigate. 
- -[source,js] ----- -'use strict' - -const { Client } = require('@elastic/elasticsearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.', - house: 'stark' - } - }) - - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Arya Stark', - quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.', - house: 'stark' - } - }) - - await client.index({ - index: 'game-of-thrones', - refresh: true, - body: { - character: 'Tyrion Lannister', - quote: 'A Lannister always pays his debts.', - house: 'lannister' - } - }) - - const { body } = await client.sql.query({ - body: { - query: "SELECT * FROM \"game-of-thrones\" WHERE house='stark'" - } - }) - - console.log(body) - - const data = body.rows.map(row => { - const obj = {} - for (let i = 0; i < row.length; i++) { - obj[body.columns[i].name] = row[i] - } - return obj - }) - - console.log(data) -} - -run().catch(console.log) ----- diff --git a/docs/examples/suggest.asciidoc b/docs/examples/suggest.asciidoc index b03ece4e1..d68fa1317 100644 --- a/docs/examples/suggest.asciidoc +++ b/docs/examples/suggest.asciidoc @@ -11,7 +11,7 @@ request. If the query part is left out, only suggestions are returned. 
---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/transport.request.asciidoc b/docs/examples/transport.request.asciidoc index d74c6c68d..fa6805909 100644 --- a/docs/examples/transport.request.asciidoc +++ b/docs/examples/transport.request.asciidoc @@ -1,10 +1,10 @@ [[transport_request_examples]] === transport.request -It can happen that you need to communicate with {es} by using an API that is not +It can happen that you need to communicate with {opensearch} by using an API that is not supported by the client, to mitigate this issue you can directly call `client.transport.request`, which is the internal utility that the client uses -to communicate with {es} when you use an API method. +to communicate with {opensearch} when you use an API method. NOTE: When using the `transport.request` method you must provide all the parameters needed to perform an HTTP call, such as `method`, `path`, @@ -19,7 +19,7 @@ maintain. ---- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/typescript.asciidoc b/docs/examples/typescript.asciidoc index 2d39ed2ac..249b12f59 100644 --- a/docs/examples/typescript.asciidoc +++ b/docs/examples/typescript.asciidoc @@ -11,7 +11,7 @@ to define the API parameters instead of _camelCase_. 
---- 'use strict' -import { Client, ApiResponse, RequestParams } from '@elastic/elasticsearch' +import { Client, ApiResponse, RequestParams } from '@opensearch/opensearch' const client = new Client({ node: 'http://localhost:9200' }) async function run (): void { diff --git a/docs/examples/update-by-query.asciidoc b/docs/examples/update-by-query.asciidoc index 7d4a647e7..9f164dc1b 100644 --- a/docs/examples/update-by-query.asciidoc +++ b/docs/examples/update-by-query.asciidoc @@ -7,7 +7,7 @@ The simplest usage of _update_by_query just performs an update on every document --------- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/update.asciidoc b/docs/examples/update.asciidoc index c18a75ff7..bd4244ee6 100644 --- a/docs/examples/update.asciidoc +++ b/docs/examples/update.asciidoc @@ -8,7 +8,7 @@ In the following example, we will index a document that also tracks how many tim --------- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { @@ -54,7 +54,7 @@ With the update API, you can also run a partial update of a document. 
--------- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/docs/examples/update_by_query.asciidoc b/docs/examples/update_by_query.asciidoc index d7a9faee6..6eb217c2d 100644 --- a/docs/examples/update_by_query.asciidoc +++ b/docs/examples/update_by_query.asciidoc @@ -7,7 +7,7 @@ The simplest usage of _update_by_query just performs an update on every document --------- 'use strict' -const { Client } = require('@elastic/elasticsearch') +const { Client } = require('@opensearch/opensearch') const client = new Client({ node: 'http://localhost:9200' }) async function run () { diff --git a/index.d.ts b/index.d.ts index e3690c53c..315f71105 100644 --- a/index.d.ts +++ b/index.d.ts @@ -58,7 +58,7 @@ import { import Serializer from './lib/Serializer'; import Helpers from './lib/Helpers'; import * as errors from './lib/errors'; -import * as estypes from './api/types' +import * as ostypes from './api/types' import * as RequestParams from './api/requestParams' declare type callbackFn = (err: ApiError, result: ApiResponse) => void; @@ -1202,7 +1202,7 @@ export { ApiResponse, RequestEvent, ResurrectEvent, - estypes, + ostypes, RequestParams, ClientOptions, NodeOptions, diff --git a/index.js b/index.js index 550e06820..5c52c95bd 100644 --- a/index.js +++ b/index.js @@ -32,7 +32,7 @@ const { EventEmitter } = require('events') const { URL } = require('url') -const debug = require('debug')('elasticsearch') +const debug = require('debug')('opensearch') const Transport = require('./lib/Transport') const Connection = require('./lib/Connection') const { ConnectionPool, CloudConnectionPool } = require('./lib/pool') @@ -47,23 +47,22 @@ if (clientVersion.includes('-')) { // clean prerelease clientVersion = clientVersion.slice(0, clientVersion.indexOf('-')) + 'p' } -const nodeVersion = process.versions.node -const 
kInitialOptions = Symbol('elasticsearchjs-initial-options') -const kChild = Symbol('elasticsearchjs-child') -const kExtensions = Symbol('elasticsearchjs-extensions') -const kEventEmitter = Symbol('elasticsearchjs-event-emitter') +const kInitialOptions = Symbol('opensearchjs-initial-options') +const kChild = Symbol('opensearchjs-child') +const kExtensions = Symbol('opensearchjs-extensions') +const kEventEmitter = Symbol('opensearchjs-event-emitter') -const ESAPI = require('./api') +const OSAPI = require('./api') -class Client extends ESAPI { +class Client extends OSAPI { constructor (opts = {}) { super({ ConfigurationError }) if (opts.cloud && opts[kChild] === undefined) { const { id, username, password } = opts.cloud // the cloud id is `cluster-name:base64encodedurl` // the url is a string divided by two '$', the first is the cloud url - // the second the elasticsearch instance, the third the kibana instance + // the second the opensearch instance, the third the opensearchDashboards instance const cloudUrls = Buffer.from(id.split(':')[1], 'base64').toString().split('$') // TODO: remove username and password here in 8 @@ -73,7 +72,7 @@ class Client extends ESAPI { opts.node = `https://${cloudUrls[1]}.${cloudUrls[0]}` // Cloud has better performances with compression enabled - // see https://github.com/elastic/elasticsearch-py/pull/704. + // see https://github.com/opensearch-project/opensearch-py/pull/704. // So unless the user specifies otherwise, we enable compression. 
if (opts.compression == null) opts.compression = 'gzip' if (opts.suggestCompression == null) opts.suggestCompression = true @@ -118,7 +117,7 @@ class Client extends ESAPI { nodeFilter: null, nodeSelector: 'round-robin', generateRequestId: null, - name: 'elasticsearch-js', + name: 'opensearch-js', auth: null, opaqueIdPrefix: null, context: null, @@ -127,8 +126,8 @@ class Client extends ESAPI { disablePrototypePoisoningProtection: false }, opts) - if (process.env.ELASTIC_CLIENT_APIVERSIONING === 'true') { - options.headers = Object.assign({ accept: 'application/vnd.elasticsearch+json; compatible-with=7' }, options.headers) + if (process.env.OPENSEARCH_CLIENT_APIVERSIONING === 'true') { + options.headers = Object.assign({ accept: 'application/vnd.opensearch+json; compatible-with=7' }, options.headers) } this[kInitialOptions] = options @@ -185,9 +184,7 @@ class Client extends ESAPI { this.helpers = new Helpers({ client: this, maxRetries: options.maxRetries, - metaHeader: options.enableMetaHeader - ? 
`es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}` - : null + metaHeader: null }) } diff --git a/lib/Connection.d.ts b/lib/Connection.d.ts index cb818bf77..3df2dafd1 100644 --- a/lib/Connection.d.ts +++ b/lib/Connection.d.ts @@ -104,4 +104,4 @@ export default class Connection { toJSON(): any } -export {}; +export { }; diff --git a/lib/Connection.js b/lib/Connection.js index 9fa681a99..ef8e1ce27 100644 --- a/lib/Connection.js +++ b/lib/Connection.js @@ -35,7 +35,7 @@ const { inspect } = require('util') const hpagent = require('hpagent') const http = require('http') const https = require('https') -const debug = require('debug')('elasticsearch') +const debug = require('debug')('opensearch') const { pipeline } = require('stream') const INVALID_PATH_REGEX = /[^\u0021-\u00ff]/ const { @@ -100,7 +100,7 @@ class Connection { if (INVALID_PATH_REGEX.test(requestParams.path) === true) { callback(new TypeError(`ERR_UNESCAPED_CHARACTERS: ${requestParams.path}`), null) /* istanbul ignore next */ - return { abort: () => {} } + return { abort: () => { } } } debug('Starting a new request', params) @@ -115,7 +115,7 @@ class Connection { const onTimeout = () => { cleanListeners() this._openRequests-- - request.once('error', () => {}) // we need to catch the request aborted error + request.once('error', () => { }) // we need to catch the request aborted error request.abort() callback(new TimeoutError('Request timed out', params), null) } @@ -128,7 +128,7 @@ class Connection { const onAbort = () => { cleanListeners() - request.once('error', () => {}) // we need to catch the request aborted error + request.once('error', () => { }) // we need to catch the request aborted error debug('Request aborted', params) this._openRequests-- callback(new RequestAbortedError(), null) @@ -168,7 +168,7 @@ class Connection { } // TODO: write a better closing logic - close (callback = () => {}) { + close (callback = () => { }) { debug('Closing connection', this.id) if 
(this._openRequests > 0) { setTimeout(() => this.close(callback), 1000) @@ -217,7 +217,7 @@ class Connection { path: '', href: url.href, origin: url.origin, - // https://github.com/elastic/elasticsearch-js/issues/843 + // https://github.com/opensearch-project/opensearch-js/issues/843 port: url.port !== '' ? url.port : undefined, headers: this.headers, agent: this.agent diff --git a/lib/Helpers.d.ts b/lib/Helpers.d.ts index b4e932d07..f1959c784 100644 --- a/lib/Helpers.d.ts +++ b/lib/Helpers.d.ts @@ -34,7 +34,7 @@ import { Search, Msearch, Bulk } from '../api/requestParams' export default class Helpers { search>(params: Search, options?: TransportRequestOptions): Promise - scrollSearch, TRequestBody extends RequestBody = Record, TContext = Context>(params: Search, options?: TransportRequestOptions): AsyncIterable> + scrollSearch, TRequestBody extends RequestBody = Record, TContext = Context>(params: Search, options?: TransportRequestOptions): AsyncIterable> scrollDocuments>(params: Search, options?: TransportRequestOptions): AsyncIterable msearch(options?: MsearchHelperOptions, reqOptions?: TransportRequestOptions): MsearchHelper bulk(options: BulkHelperOptions, reqOptions?: TransportRequestOptions): BulkHelper @@ -130,6 +130,6 @@ export interface MsearchHelperOptions extends Omit { declare type callbackFn = (err: ApiError, result: ApiResponse) => void; export interface MsearchHelper extends Promise { stop(error?: Error): void - search, TRequestBody extends RequestBody = Record, TContext = Context>(header: Omit, body: TRequestBody): Promise> - search, TRequestBody extends RequestBody = Record, TContext = Context>(header: Omit, body: TRequestBody, callback: callbackFn): void + search, TRequestBody extends RequestBody = Record, TContext = Context>(header: Omit, body: TRequestBody): Promise> + search, TRequestBody extends RequestBody = Record, TContext = Context>(header: Omit, body: TRequestBody, callback: callbackFn): void } diff --git a/lib/Helpers.js 
b/lib/Helpers.js index a776abdf4..febe8e353 100644 --- a/lib/Helpers.js +++ b/lib/Helpers.js @@ -38,7 +38,7 @@ const { ResponseError, ConfigurationError } = require('./errors') const pImmediate = promisify(setImmediate) const sleep = promisify(setTimeout) -const kClient = Symbol('elasticsearch-client') +const kClient = Symbol('opensearch-client') const kMetaHeader = Symbol('meta header') /* istanbul ignore next */ const noop = () => { } @@ -52,10 +52,10 @@ class Helpers { /** * Runs a search operation. The only difference between client.search and this utility, - * is that we are only returning the hits to the user and not the full ES response. + * is that we are only returning the hits to the user and not the full opensearch response. * This helper automatically adds `filter_path=hits.hits._source` to the querystring, * as it will only need the documents source. - * @param {object} params - The Elasticsearch's search parameters. + * @param {object} params - The OpenSearch's search parameters. * @param {object} options - The client optional configuration for this request. * @return {array} The documents that matched the request. */ @@ -79,7 +79,7 @@ class Helpers { * Each result represents the entire body of a single scroll search request, * if you just need to scroll the results, use scrollDocuments. * This function handles automatically retries on 429 status code. - * @param {object} params - The Elasticsearch's search parameters. + * @param {object} params - The OpenSearch's search parameters. * @param {object} options - The client optional configuration for this request. * @return {iterator} the async iterator */ @@ -162,7 +162,7 @@ class Helpers { * Each document is what you will find by running a scrollSearch and iterating on the hits array. * This helper automatically adds `filter_path=hits.hits._source` to the querystring, * as it will only need the documents source. - * @param {object} params - The Elasticsearch's search parameters. 
+ * @param {object} params - The OpenSearch's search parameters. * @param {object} options - The client optional configuration for this request. * @return {iterator} the async iterator */ @@ -723,7 +723,7 @@ class Helpers { if (status >= 400) { // 429 is the only staus code where we might want to retry // a document, because it was not an error in the document itself, - // but the ES node were handling too many operations. + // but the opensearch node were handling too many operations. if (status === 429) { retry.push(bulkBody[indexSlice]) /* istanbul ignore next */ diff --git a/lib/Serializer.d.ts b/lib/Serializer.d.ts index 995edbe17..976d9b957 100644 --- a/lib/Serializer.d.ts +++ b/lib/Serializer.d.ts @@ -33,7 +33,7 @@ export interface SerializerOptions { } export default class Serializer { - constructor (opts?: SerializerOptions) + constructor(opts?: SerializerOptions) serialize(object: any): string; deserialize(json: string): any; ndserialize(array: any[]): string; diff --git a/lib/Serializer.js b/lib/Serializer.js index 4d4c20903..498640735 100644 --- a/lib/Serializer.js +++ b/lib/Serializer.js @@ -31,7 +31,7 @@ 'use strict' const { stringify } = require('querystring') -const debug = require('debug')('elasticsearch') +const debug = require('debug')('opensearch') const sjson = require('secure-json-parse') const { SerializationError, DeserializationError } = require('./errors') const kJsonOptions = Symbol('secure json parse options') @@ -91,7 +91,7 @@ class Serializer { const keys = Object.keys(object) for (let i = 0, len = keys.length; i < len; i++) { const key = keys[i] - // elasticsearch will complain for keys without a value + // opensearch will complain for keys without a value if (object[key] === undefined) { delete object[key] } else if (Array.isArray(object[key]) === true) { diff --git a/lib/Transport.d.ts b/lib/Transport.d.ts index a88e9ff0f..9a59fc21b 100644 --- a/lib/Transport.d.ts +++ b/lib/Transport.d.ts @@ -35,10 +35,10 @@ import Serializer from 
'./Serializer'; import * as errors from './errors'; export type ApiError = errors.ConfigurationError | errors.ConnectionError | - errors.DeserializationError | errors.SerializationError | - errors.NoLivingConnectionsError | errors.ResponseError | - errors.TimeoutError | errors.RequestAbortedError | - errors.NotCompatibleError + errors.DeserializationError | errors.SerializationError | + errors.NoLivingConnectionsError | errors.ResponseError | + errors.TimeoutError | errors.RequestAbortedError | + errors.NotCompatibleError export type Context = unknown @@ -99,10 +99,10 @@ export interface RequestEvent, TContext = Contex // ApiResponse and RequestEvent are the same thing // we are doing this for have more clear names -export interface ApiResponse, TContext = Context> extends RequestEvent {} +export interface ApiResponse, TContext = Context> extends RequestEvent { } -export type RequestBody> = T | string | Buffer | ReadableStream -export type RequestNDBody[]> = T | string | string[] | Buffer | ReadableStream +export type RequestBody> = T | string | Buffer | ReadableStream +export type RequestNDBody[]> = T | string | string[] | Buffer | ReadableStream export interface TransportRequestParams { method: string; diff --git a/lib/Transport.js b/lib/Transport.js index 62e3c47da..3b709f60a 100644 --- a/lib/Transport.js +++ b/lib/Transport.js @@ -30,7 +30,7 @@ 'use strict' -const debug = require('debug')('elasticsearch') +const debug = require('debug')('opensearch') const os = require('os') const { gzip, unzip, createGzip } = require('zlib') const buffer = require('buffer') @@ -45,11 +45,11 @@ const { NotCompatibleError } = require('./errors') -const noop = () => {} +const noop = () => { } const compatibleCheckEmitter = new EventEmitter() const clientVersion = require('../package.json').version -const userAgent = `elasticsearch-js/${clientVersion} (${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})` +const userAgent = `opensearch-js/${clientVersion} 
(${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})` const MAX_BUFFER_LENGTH = buffer.constants.MAX_LENGTH const MAX_STRING_LENGTH = buffer.constants.MAX_STRING_LENGTH const kCompatibleCheck = Symbol('compatible check') @@ -81,7 +81,7 @@ class Transport { this.name = opts.name this.opaqueIdPrefix = opts.opaqueIdPrefix this[kCompatibleCheck] = 0 // 0 = to be checked, 1 = checking, 2 = checked-ok, 3 checked-notok - this[kApiVersioning] = process.env.ELASTIC_CLIENT_APIVERSIONING === 'true' + this[kApiVersioning] = process.env.OPENSEARCH_CLIENT_APIVERSIONING === 'true' this.nodeFilter = opts.nodeFilter || defaultNodeFilter if (typeof opts.nodeSelector === 'function') { @@ -317,10 +317,10 @@ class Transport { // - the request is not a HEAD request // - the payload is not an empty string if (result.headers['content-type'] !== undefined && - (result.headers['content-type'].indexOf('application/json') > -1 || - result.headers['content-type'].indexOf('application/vnd.elasticsearch+json') > -1) && - isHead === false && - payload !== '' + (result.headers['content-type'].indexOf('application/json') > -1 || + result.headers['content-type'].indexOf('application/vnd.opensearch+json') > -1) && + isHead === false && + payload !== '' ) { try { result.body = this.serializer.deserialize(payload) @@ -339,7 +339,7 @@ class Transport { (isHead === true && result.statusCode === 404) if (ignoreStatusCode === false && - (result.statusCode === 502 || result.statusCode === 503 || result.statusCode === 504)) { + (result.statusCode === 502 || result.statusCode === 503 || result.statusCode === 504)) { // if the statusCode is 502/3/4 we should run our retry strategy // and mark the connection as dead this.connectionPool.markDead(meta.connection) @@ -393,10 +393,10 @@ class Transport { } if (params.body !== '') { - headers['content-type'] = headers['content-type'] || (this[kApiVersioning] ? 
'application/vnd.elasticsearch+json; compatible-with=7' : 'application/json') + headers['content-type'] = headers['content-type'] || (this[kApiVersioning] ? 'application/vnd.opensearch+json; compatible-with=7' : 'application/json') } - // handle ndjson body + // handle ndjson body } else if (params.bulkBody != null) { if (shouldSerialize(params.bulkBody) === true) { try { @@ -410,7 +410,7 @@ class Transport { params.body = params.bulkBody } if (params.body !== '') { - headers['content-type'] = headers['content-type'] || (this[kApiVersioning] ? 'application/vnd.elasticsearch+x-ndjson; compatible-with=7' : 'application/x-ndjson') + headers['content-type'] = headers['content-type'] || (this[kApiVersioning] ? 'application/vnd.opensearch+x-ndjson; compatible-with=7' : 'application/x-ndjson') } } @@ -480,12 +480,12 @@ class Transport { this.checkCompatibleInfo() } } - // the compatible check is finished and it's not Elasticsearch + // the compatible check is finished and it's not OpenSearch } else if (this[kCompatibleCheck] === 3) { const err = new NotCompatibleError(result) this.emit('request', err, result) process.nextTick(callback, err, result) - // the compatible check finished and it's Elasticsearch + // the compatible check finished and it's OpenSearch } else { prepareRequest() } @@ -561,16 +561,16 @@ class Transport { debug('compatible check failed', err) if (err.statusCode === 401 || err.statusCode === 403) { this[kCompatibleCheck] = 2 - process.emitWarning('The client is unable to verify that the server is Elasticsearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') + process.emitWarning('The client is unable to verify that the server is OpenSearchdue to security privileges on the server side. 
Some functionality may not be compatible if the server is running an unsupported product.') compatibleCheckEmitter.emit('compatible-check', true) } else { this[kCompatibleCheck] = 0 compatibleCheckEmitter.emit('compatible-check', false) } } else { - debug('Checking elasticsearch version', result.body, result.headers) + debug('Checking opensearch version', result.body, result.headers) if (result.body.version == null || typeof result.body.version.number !== 'string') { - debug('Can\'t access Elasticsearch version') + debug('Can\'t access OpenSearchversion') return compatibleCheckEmitter.emit('compatible-check', false) } @@ -587,11 +587,11 @@ class Transport { // support odfe > v7 validation if (major !== 7) { - debug('Invalid Elasticsearch distribution') + debug('Invalid OpenSearchdistribution') return compatibleCheckEmitter.emit('compatible-check', false) } - debug('Valid Elasticsearch distribution') + debug('Valid OpenSearchdistribution') this[kCompatibleCheck] = 2 compatibleCheckEmitter.emit('compatible-check', true) } @@ -616,8 +616,8 @@ function toMs (time) { function shouldSerialize (obj) { return typeof obj !== 'string' && - typeof obj.pipe !== 'function' && - Buffer.isBuffer(obj) === false + typeof obj.pipe !== 'function' && + Buffer.isBuffer(obj) === false } function isStream (obj) { @@ -627,8 +627,8 @@ function isStream (obj) { function defaultNodeFilter (node) { // avoid master only nodes if (node.roles.master === true && - node.roles.data === false && - node.roles.ingest === false) { + node.roles.data === false && + node.roles.ingest === false) { return false } return true diff --git a/lib/errors.d.ts b/lib/errors.d.ts index fa3d1af77..12b6bb54c 100644 --- a/lib/errors.d.ts +++ b/lib/errors.d.ts @@ -30,53 +30,53 @@ import { ApiResponse, Context } from './Transport' -export declare class ElasticsearchClientError extends Error { +export declare class OpenSearchClientError extends Error { name: string; message: string; } -export declare class TimeoutError, 
TContext = Context> extends ElasticsearchClientError { +export declare class TimeoutError, TContext = Context> extends OpenSearchClientError { name: string; message: string; meta: ApiResponse; constructor(message: string, meta: ApiResponse); } -export declare class ConnectionError, TContext = Context> extends ElasticsearchClientError { +export declare class ConnectionError, TContext = Context> extends OpenSearchClientError { name: string; message: string; meta: ApiResponse; constructor(message: string, meta: ApiResponse); } -export declare class NoLivingConnectionsError, TContext = Context> extends ElasticsearchClientError { +export declare class NoLivingConnectionsError, TContext = Context> extends OpenSearchClientError { name: string; message: string; meta: ApiResponse; constructor(message: string, meta: ApiResponse); } -export declare class SerializationError extends ElasticsearchClientError { +export declare class SerializationError extends OpenSearchClientError { name: string; message: string; data: any; constructor(message: string, data: any); } -export declare class DeserializationError extends ElasticsearchClientError { +export declare class DeserializationError extends OpenSearchClientError { name: string; message: string; data: string; constructor(message: string, data: string); } -export declare class ConfigurationError extends ElasticsearchClientError { +export declare class ConfigurationError extends OpenSearchClientError { name: string; message: string; constructor(message: string); } -export declare class ResponseError, TContext = Context> extends ElasticsearchClientError { +export declare class ResponseError, TContext = Context> extends OpenSearchClientError { name: string; message: string; meta: ApiResponse; @@ -86,14 +86,14 @@ export declare class ResponseError, TContext = C constructor(meta: ApiResponse); } -export declare class RequestAbortedError, TContext = Context> extends ElasticsearchClientError { +export declare class RequestAbortedError, 
TContext = Context> extends OpenSearchClientError { name: string; message: string; meta: ApiResponse; constructor(message: string, meta: ApiResponse); } -export declare class NotCompatibleError, TContext = Context> extends ElasticsearchClientError { +export declare class NotCompatibleError, TContext = Context> extends OpenSearchClientError { name: string; message: string; meta: ApiResponse; diff --git a/lib/errors.js b/lib/errors.js index 7fe4a7313..7b10adab2 100644 --- a/lib/errors.js +++ b/lib/errors.js @@ -30,14 +30,14 @@ 'use strict' -class ElasticsearchClientError extends Error { +class OpenSearchClientError extends Error { constructor (message) { super(message) - this.name = 'ElasticsearchClientError' + this.name = 'OpenSearchClientError' } } -class TimeoutError extends ElasticsearchClientError { +class TimeoutError extends OpenSearchClientError { constructor (message, meta) { super(message) Error.captureStackTrace(this, TimeoutError) @@ -47,7 +47,7 @@ class TimeoutError extends ElasticsearchClientError { } } -class ConnectionError extends ElasticsearchClientError { +class ConnectionError extends OpenSearchClientError { constructor (message, meta) { super(message) Error.captureStackTrace(this, ConnectionError) @@ -57,7 +57,7 @@ class ConnectionError extends ElasticsearchClientError { } } -class NoLivingConnectionsError extends ElasticsearchClientError { +class NoLivingConnectionsError extends OpenSearchClientError { constructor (message, meta) { super(message) Error.captureStackTrace(this, NoLivingConnectionsError) @@ -67,7 +67,7 @@ class NoLivingConnectionsError extends ElasticsearchClientError { } } -class SerializationError extends ElasticsearchClientError { +class SerializationError extends OpenSearchClientError { constructor (message, data) { super(message, data) Error.captureStackTrace(this, SerializationError) @@ -77,7 +77,7 @@ class SerializationError extends ElasticsearchClientError { } } -class DeserializationError extends ElasticsearchClientError { 
+class DeserializationError extends OpenSearchClientError { constructor (message, data) { super(message, data) Error.captureStackTrace(this, DeserializationError) @@ -87,7 +87,7 @@ class DeserializationError extends ElasticsearchClientError { } } -class ConfigurationError extends ElasticsearchClientError { +class ConfigurationError extends OpenSearchClientError { constructor (message) { super(message) Error.captureStackTrace(this, ConfigurationError) @@ -96,7 +96,7 @@ class ConfigurationError extends ElasticsearchClientError { } } -class ResponseError extends ElasticsearchClientError { +class ResponseError extends OpenSearchClientError { constructor (meta) { super('Response Error') Error.captureStackTrace(this, ResponseError) @@ -134,7 +134,7 @@ class ResponseError extends ElasticsearchClientError { } } -class RequestAbortedError extends ElasticsearchClientError { +class RequestAbortedError extends OpenSearchClientError { constructor (message, meta) { super(message) Error.captureStackTrace(this, RequestAbortedError) @@ -144,18 +144,18 @@ class RequestAbortedError extends ElasticsearchClientError { } } -class NotCompatibleError extends ElasticsearchClientError { +class NotCompatibleError extends OpenSearchClientError { constructor (meta) { super('Not Compatible Error') Error.captureStackTrace(this, NotCompatibleError) this.name = 'NotCompatibleError' - this.message = 'The client noticed that the server is not a supported distribution of Elasticsearch' + this.message = 'The client noticed that the server is not a supported distribution of OpenSearch' this.meta = meta } } module.exports = { - ElasticsearchClientError, + OpenSearchClientError, TimeoutError, ConnectionError, NoLivingConnectionsError, diff --git a/lib/pool/BaseConnectionPool.js b/lib/pool/BaseConnectionPool.js index bdcf0abf0..05243415a 100644 --- a/lib/pool/BaseConnectionPool.js +++ b/lib/pool/BaseConnectionPool.js @@ -31,9 +31,9 @@ 'use strict' const { URL } = require('url') -const debug = 
require('debug')('elasticsearch') +const debug = require('debug')('opensearch') const Connection = require('../Connection') -const noop = () => {} +const noop = () => { } class BaseConnectionPool { constructor (opts) { @@ -172,10 +172,10 @@ class BaseConnectionPool { debug(`The connection with id '${node.id}' is already present`) this.markAlive(connectionById) newConnections.push(connectionById) - // in case the user has passed a single url (or an array of urls), - // the connection id will be the full href; to avoid closing valid connections - // because are not present in the pool, we check also the node url, - // and if is already present we update its id with the ES provided one. + // in case the user has passed a single url (or an array of urls), + // the connection id will be the full href; to avoid closing valid connections + // because are not present in the pool, we check also the node url, + // and if is already present we update its id with the opensearch provided one. } else if (connectionByUrl) { connectionByUrl.id = node.id this.markAlive(connectionByUrl) diff --git a/lib/pool/ConnectionPool.js b/lib/pool/ConnectionPool.js index 7e5b32f69..7fca12550 100644 --- a/lib/pool/ConnectionPool.js +++ b/lib/pool/ConnectionPool.js @@ -32,9 +32,9 @@ const BaseConnectionPool = require('./BaseConnectionPool') const assert = require('assert') -const debug = require('debug')('elasticsearch') +const debug = require('debug')('opensearch') const Connection = require('../Connection') -const noop = () => {} +const noop = () => { } class ConnectionPool extends BaseConnectionPool { constructor (opts) { @@ -152,7 +152,7 @@ class ConnectionPool extends BaseConnectionPool { let isAlive = true const statusCode = response !== null ? 
response.statusCode : 0 if (err != null || - (statusCode === 502 || statusCode === 503 || statusCode === 504)) { + (statusCode === 502 || statusCode === 503 || statusCode === 504)) { debug(`Resurrect: connection '${id}' is still dead`) this.markDead(connection) isAlive = false @@ -169,7 +169,7 @@ class ConnectionPool extends BaseConnectionPool { }) callback(isAlive, connection) }) - // optimistic strategy + // optimistic strategy } else { debug(`Resurrect: optimistic resurrection for connection '${id}'`) this.dead.splice(this.dead.indexOf(id), 1) diff --git a/package.json b/package.json index d5f8e47c5..66f30cc85 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { - "name": "@elastic/elasticsearch", - "description": "The official Elasticsearch client for Node.js", + "name": "@opensearch/opensearch", + "description": "The official OpenSearch client for Node.js", "main": "index.js", "types": "index.d.ts", "exports": { @@ -14,9 +14,8 @@ "version": "7.14.0", "versionCanary": "7.14.0-canary.6", "keywords": [ - "elasticsearch", - "elastic", - "kibana", + "opensearch", + "opensearchDashboards", "mapping", "REST", "search", diff --git a/scripts/download-artifacts.js b/scripts/download-artifacts.js index f86fa171a..87a1625ca 100644 --- a/scripts/download-artifacts.js +++ b/scripts/download-artifacts.js @@ -45,11 +45,11 @@ const pipeline = promisify(stream.pipeline) const unzip = promisify(crossZip.unzip) const rm = promisify(rimraf) -const esFolder = join(__dirname, '..', 'elasticsearch') -const zipFolder = join(esFolder, 'artifacts.zip') -const specFolder = join(esFolder, 'rest-api-spec', 'api') -const ossTestFolder = join(esFolder, 'rest-api-spec', 'test', 'oss') -const artifactInfo = join(esFolder, 'info.json') +const osFolder = join(__dirname, '..', 'opensearch') +const zipFolder = join(osFolder, 'artifacts.zip') +const specFolder = join(osFolder, 'rest-api-spec', 'api') +const ossTestFolder = join(osFolder, 'rest-api-spec', 'test', 'oss') +const artifactInfo 
= join(osFolder, 'info.json') async function downloadArtifacts (opts) { if (typeof opts.version !== 'string') { @@ -80,9 +80,9 @@ async function downloadArtifacts (opts) { } } - log.text = 'Cleanup checkouts/elasticsearch' - await rm(esFolder) - await mkdir(esFolder, { recursive: true }) + log.text = 'Cleanup checkouts/opensearch' + await rm(osFolder) + await mkdir(osFolder, { recursive: true }) log.text = 'Downloading artifacts' const response = await fetch(resolved.url) @@ -93,7 +93,7 @@ async function downloadArtifacts (opts) { await pipeline(response.body, createWriteStream(zipFolder)) log.text = 'Unzipping' - await unzip(zipFolder, esFolder) + await unzip(zipFolder, osFolder) log.text = 'Cleanup' await rm(zipFolder) @@ -113,17 +113,17 @@ function loadInfo () { } async function resolve (version, hash) { - const response = await fetch(`https://artifacts-api.elastic.co/v1/versions/${version}`) + const response = await fetch(`https://artifacts-api.opensearch.co/v1/versions/${version}`) if (!response.ok) { throw new Error(`unexpected response ${response.statusText}`) } const data = await response.json() const esBuilds = data.version.builds - .filter(build => build.projects.elasticsearch != null) + .filter(build => build.projects.opensearch != null) .map(build => { return { - projects: build.projects.elasticsearch, + projects: build.projects.opensearch, buildId: build.build_id, date: build.start_time, version: build.version diff --git a/scripts/es-docker.sh b/scripts/es-docker.sh deleted file mode 100755 index f1747654b..000000000 --- a/scripts/es-docker.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -# Images are cached locally, it may be needed -# to delete an old image and download again -# the latest snapshot. 
- -# pass `--clean` to reemove the old snapshot -if [ "$1" == "--clean" ]; then - docker rmi $(docker images --format '{{.Repository}}:{{.Tag}}' | grep '8.0.0-SNAPSHOT') -fi - -# Create the 'elastic' network if doesn't exist -exec docker network ls | grep elastic > /dev/null || docker network create elastic > /dev/null - -if [ "$1" == "--detach" ]; then - exec docker run \ - --rm \ - -e "node.attr.testattr=test" \ - -e "path.repo=/tmp" \ - -e "repositories.url.allowed_urls=http://snapshot.*" \ - -e "discovery.type=single-node" \ - -p 9200:9200 \ - --detach \ - --network=elastic \ - --name=elasticsearch \ - docker.elastic.co/elasticsearch/elasticsearch:7.4.0 -else - exec docker run \ - --rm \ - -e "node.attr.testattr=test" \ - -e "path.repo=/tmp" \ - -e "repositories.url.allowed_urls=http://snapshot.*" \ - -e "discovery.type=single-node" \ - -p 9200:9200 \ - --network=elastic \ - --name=elasticsearch \ - docker.elastic.co/elasticsearch/elasticsearch:7.4.0 -fi diff --git a/scripts/kibana-docker.sh b/scripts/kibana-docker.sh deleted file mode 100755 index e768962c8..000000000 --- a/scripts/kibana-docker.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -exec docker run \ - --rm \ - -e ELASTICSEARCH_URL="http://opensearch:9200" \ - -p 5601:5601 \ - --network=elastic \ - docker.elastic.co/kibana/kibana:7.0.0 diff --git a/scripts/release-canary.js b/scripts/release-canary.js index f9bda6c01..57f743262 100644 --- a/scripts/release-canary.js +++ b/scripts/release-canary.js @@ -37,7 +37,7 @@ async function release (opts) { const newCanaryVersion = `${originalVersion.split('-')[0]}-canary.${newCanaryInteger}` // Update the package.json with the correct name and new version - packageJson.name = '@elastic/elasticsearch-canary' + packageJson.name = '@opensearch/opensearch-canary' packageJson.version = newCanaryVersion packageJson.versionCanary = newCanaryVersion packageJson.types = './api/new.d.ts' @@ -50,9 +50,9 @@ async function release (opts) { 'utf8' ) - // update the 
npmignore to publish the kibana types as well + // update the npmignore to publish the opensearchDashboards types as well const newNpmIgnore = originalNpmIgnore.slice(0, originalNpmIgnore.indexOf('# CANARY-PACKAGE')) + - originalNpmIgnore.slice(originalNpmIgnore.indexOf('# /CANARY-PACKAGE') + 17) + originalNpmIgnore.slice(originalNpmIgnore.indexOf('# /CANARY-PACKAGE') + 17) await writeFile( join(__dirname, '..', '.npmignore'), newNpmIgnore, diff --git a/scripts/utils/clone-es.js b/scripts/utils/clone-opensearch.js similarity index 79% rename from scripts/utils/clone-es.js rename to scripts/utils/clone-opensearch.js index 0d494fca0..f2d703376 100644 --- a/scripts/utils/clone-es.js +++ b/scripts/utils/clone-opensearch.js @@ -34,36 +34,36 @@ const { accessSync, mkdirSync } = require('fs') const { join } = require('path') const Git = require('simple-git') -const esRepo = 'https://github.com/opensearch-project/opensearch-js.git' -const esFolder = join(__dirname, '..', '..', 'elasticsearch') -const apiFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'api') +const osRepo = 'https://github.com/opensearch-project/opensearch.git' +const osFolder = join(__dirname, '..', '..', 'opensearch') +const apiFolder = join(osFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'api') -function cloneAndCheckout (opts, callback) { +function cloneAndCheckout(opts, callback) { const { log, tag, branch } = opts withTag(tag, callback) /** - * Sets the elasticsearch repository to the given tag. - * If the repository is not present in `esFolder` it will + * Sets the opensearch repository to the given tag. + * If the repository is not present in `osFolder` it will * clone the repository and the checkout the tag. * If the repository is already present but it cannot checkout to * the given tag, it will perform a pull and then try again. 
* @param {string} tag * @param {function} callback */ - function withTag (tag, callback) { + function withTag(tag, callback) { let fresh = false let retry = 0 - if (!pathExist(esFolder)) { - if (!createFolder(esFolder)) { + if (!pathExist(osFolder)) { + if (!createFolder(osFolder)) { log.fail('Failed folder creation') return } fresh = true } - const git = Git(esFolder) + const git = Git(osFolder) if (fresh) { clone(checkout) @@ -73,7 +73,7 @@ function cloneAndCheckout (opts, callback) { checkout() } - function checkout (alsoPull = false) { + function checkout(alsoPull = false) { if (branch) { log.text = `Checking out branch '${branch}'` } else { @@ -94,8 +94,8 @@ function cloneAndCheckout (opts, callback) { }) } - function pull (cb) { - log.text = 'Pulling elasticsearch repository...' + function pull(cb) { + log.text = 'Pulling opensearch repository...' git.pull(err => { if (err) { callback(err, { apiFolder }) @@ -105,9 +105,9 @@ function cloneAndCheckout (opts, callback) { }) } - function clone (cb) { - log.text = 'Cloning elasticsearch repository...' - git.clone(esRepo, esFolder, err => { + function clone(cb) { + log.text = 'Cloning opensearch repository...' 
+ git.clone(osRepo, osFolder, err => { if (err) { callback(err, { apiFolder }) return @@ -122,7 +122,7 @@ function cloneAndCheckout (opts, callback) { * @param {string} path * @returns {boolean} true if exists, false if not */ - function pathExist (path) { + function pathExist(path) { try { accessSync(path) return true @@ -136,7 +136,7 @@ function cloneAndCheckout (opts, callback) { * @param {string} name * @returns {boolean} true on success, false on failure */ - function createFolder (name) { + function createFolder(name) { try { mkdirSync(name) return true diff --git a/scripts/utils/generateDocs.js b/scripts/utils/generateDocs.js index fd591e35f..85d21d768 100644 --- a/scripts/utils/generateDocs.js +++ b/scripts/utils/generateDocs.js @@ -76,11 +76,11 @@ function generateDocs (common, spec) { == API Reference - This document contains the entire list of the Elasticsearch API supported by the client, both OSS and commercial. The client is entirely licensed under Apache 2.0. + This document contains the entire list of the OpenSearch API supported by the client, both OSS and commercial. The client is entirely licensed under Apache 2.0. - Elasticsearch exposes an HTTP layer to communicate with, and the client is a library that will help you do this. Because of this reason, you will see HTTP related parameters, such as ${'`'}body${'`'} or ${'`'}headers${'`'}. + OpenSearch exposes an HTTP layer to communicate with, and the client is a library that will help you do this. Because of this reason, you will see HTTP related parameters, such as ${'`'}body${'`'} or ${'`'}headers${'`'}. - Every API can accept two objects, the first contains all the parameters that will be sent to Elasticsearch, while the second includes the request specific parameters, such as timeouts, headers, and so on. 
+ Every API can accept two objects, the first contains all the parameters that will be sent to OpenSearch, while the second includes the request specific parameters, such as timeouts, headers, and so on. In the first object, every parameter but the body will be sent via querystring or url parameter, depending on the API, and every unrecognized parameter will be sent as querystring. [source,js] @@ -276,8 +276,8 @@ function fixLink (name, str) { if (override) return override if (!str) return '' /* Replace references to the guide with the attribute {ref} because - * the json files in the Elasticsearch repo are a bit of a mess. */ - str = str.replace(/^.+guide\/en\/elasticsearch\/reference\/[^/]+\/([^./]*\.html(?:#.+)?)$/, '{ref}/$1') + * the json files in the OpenSearch repo are a bit of a mess. */ + str = str.replace(/^.+guide\/en\/opensearch\/reference\/[^/]+\/([^./]*\.html(?:#.+)?)$/, '{ref}/$1') str = str.replace(/frozen\.html/, 'freeze-index-api.html') str = str.replace(/ml-file-structure\.html/, 'ml-find-file-structure.html') str = str.replace(/security-api-get-user-privileges\.html/, 'security-api-get-privileges.html') diff --git a/scripts/utils/generateMain.js b/scripts/utils/generateMain.js index e508ac67f..82b1d8477 100644 --- a/scripts/utils/generateMain.js +++ b/scripts/utils/generateMain.js @@ -60,7 +60,7 @@ function genFactory (folder, specFolder, namespaces) { const spec = readSpec(specFolder, file.slice(0, -5)) const isHead = isHeadMethod(spec, file.slice(0, -5)) const body = hasBody(spec, file.slice(0, -5)) - const methods = acc === null ? buildMethodDefinition({ kibana: false }, val, name, body, isHead, spec) : null + const methods = acc === null ? 
buildMethodDefinition({ opensearchDashboards: false }, val, name, body, isHead, spec) : null const obj = {} if (methods) { for (const m of methods) { @@ -77,7 +77,7 @@ function genFactory (folder, specFolder, namespaces) { }) .reduce((acc, val) => deepmerge(acc, val), {}) - const kibanaTypes = apiFiles + const opensearchDashboardsTypes = apiFiles .map(file => { const name = file .slice(0, -5) @@ -92,7 +92,7 @@ function genFactory (folder, specFolder, namespaces) { const spec = readSpec(specFolder, file.slice(0, -5)) const isHead = isHeadMethod(spec, file.slice(0, -5)) const body = hasBody(spec, file.slice(0, -5)) - const methods = acc === null ? buildMethodDefinition({ kibana: true }, val, name, body, isHead, spec) : null + const methods = acc === null ? buildMethodDefinition({ opensearchDashboards: true }, val, name, body, isHead, spec) : null const obj = {} if (methods) { for (const m of methods) { @@ -119,9 +119,9 @@ function genFactory (folder, specFolder, namespaces) { // remove useless quotes and commas .replace(/"/g, '') .replace(/,$/gm, '') - const kibanaTypesStr = Object.keys(kibanaTypes) + const opensearchDashboardsTypesStr = Object.keys(opensearchDashboardsTypes) .map(key => { - const line = ` ${key}: ${JSON.stringify(kibanaTypes[key], null, 4)}` + const line = ` ${key}: ${JSON.stringify(opensearchDashboardsTypes[key], null, 4)}` if (line.slice(-1) === '}') { return line.slice(0, -1) + ' }' } @@ -148,14 +148,14 @@ function genFactory (folder, specFolder, namespaces) { getters.push(`${namespace}: { get () { return this.${camelify(namespace)} } },\n`) } } else { - apisStr += `ESAPI.prototype.${camelify(namespace)} = ${camelify(namespace)}Api\n` + apisStr += `OSAPI.prototype.${camelify(namespace)} = ${camelify(namespace)}Api\n` if (namespace.includes('_')) { getters.push(`${namespace}: { get () { return this.${camelify(namespace)} } },\n`) } } } - apisStr += '\nObject.defineProperties(ESAPI.prototype, {\n' + apisStr += 
'\nObject.defineProperties(OSAPI.prototype, {\n' for (const getter of getters) { apisStr += getter } @@ -201,18 +201,18 @@ function genFactory (folder, specFolder, namespaces) { const { kConfigurationError } = require('./utils') ${symbols} - function ESAPI (opts) { + function OSAPI (opts) { this[kConfigurationError] = opts.ConfigurationError ${symbolsInstance} } ${apisStr} - module.exports = ESAPI + module.exports = OSAPI ` // new line at the end of file - return { fn: fn + '\n', types: typesStr, kibanaTypes: kibanaTypesStr } + return { fn: fn + '\n', types: typesStr, opensearchDashboardsTypes: opensearchDashboardsTypesStr } } // from snake_case to camelCase @@ -235,7 +235,7 @@ function buildMethodDefinition (opts, api, name, hasBody, isHead, spec) { const responseType = isHead ? 'boolean' : 'Record' const defaultBodyType = content_type && content_type.includes('application/x-ndjson') ? 'Record[]' : 'Record' - if (opts.kibana) { + if (opts.opensearchDashboards) { if (hasBody) { return [ { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise>' } diff --git a/scripts/utils/index.js b/scripts/utils/index.js index 8bb3bb53e..17ea52f1c 100644 --- a/scripts/utils/index.js +++ b/scripts/utils/index.js @@ -31,7 +31,7 @@ 'use strict' const generate = require('./generateApis') -const cloneAndCheckout = require('./clone-es') +const cloneAndCheckout = require('./clone-opensearch') const genFactory = require('./generateMain') const generateDocs = require('./generateDocs') const generateRequestTypes = require('./generateRequestTypes') diff --git a/scripts/wait-cluster.sh b/scripts/wait-cluster.sh index 4cacaa4b6..aa6274720 100755 --- a/scripts/wait-cluster.sh +++ b/scripts/wait-cluster.sh @@ -6,10 +6,10 @@ attempt_counter=0 max_attempts=5 url="${TEST_ES_SERVER}/_cluster/health?wait_for_status=green&timeout=50s" -echo "Waiting for Elasticsearch..." +echo "Waiting for OpenSearch..." 
while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' --max-time 55 "$url")" != "200" ]]; do if [ ${attempt_counter} -eq ${max_attempts} ];then - echo "\nCouldn't connect to Elasticsearch" + echo "\nCouldn't connect to OpenSearch" exit 1 fi diff --git a/test/acceptance/events-order.test.js b/test/acceptance/events-order.test.js index e9030d599..131f446dc 100644 --- a/test/acceptance/events-order.test.js +++ b/test/acceptance/events-order.test.js @@ -377,7 +377,7 @@ test('Deserialization Error', t => { } stream.on('close', () => t.pass('Stream destroyed')) process.nextTick(callback, null, stream) - return { abort () {} } + return { abort () { } } } } diff --git a/test/acceptance/observability.test.js b/test/acceptance/observability.test.js index 8c5763a51..f3c66b567 100644 --- a/test/acceptance/observability.test.js +++ b/test/acceptance/observability.test.js @@ -18,7 +18,7 @@ const { Client, connection: { MockConnection, MockConnectionSniff } } = require('../utils') -const noop = () => {} +const noop = () => { } test('Request id', t => { t.test('Default generateRequestId', t => { @@ -322,7 +322,7 @@ test('Client name', t => { client.on('sniff', (err, { meta }) => { t.error(err) - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') }) }) @@ -337,15 +337,15 @@ test('Client name', t => { }) client.on('request', (e, { meta }) => { - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') }) client.on('response', (e, { meta }) => { - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') }) client.on('sniff', (e, { meta }) => { - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') }) client.transport.request({ @@ -376,7 +376,7 @@ test('Client name', t => { client.on('resurrect', (err, meta) => { t.error(err) - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') clock.uninstall() }) diff --git a/test/acceptance/product-check.test.js 
b/test/acceptance/product-check.test.js index 81aba00e0..3b645f703 100644 --- a/test/acceptance/product-check.test.js +++ b/test/acceptance/product-check.test.js @@ -139,7 +139,7 @@ test('Errors not v7', t => { if (req.method === 'GET') { t.error(err) } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') } }) @@ -151,7 +151,7 @@ test('Errors not v7', t => { } } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) }) @@ -228,7 +228,7 @@ test('Auth error - 401', t => { process.on('warning', onWarning) function onWarning (warning) { - t.equal(warning.message, 'The client is unable to verify that the server is Elasticsearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') + t.equal(warning.message, 'The client is unable to verify that the server is OpenSearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') } const requests = [{ @@ -279,7 +279,7 @@ test('Auth error - 403', t => { process.on('warning', onWarning) function onWarning (warning) { - t.equal(warning.message, 'The client is unable to verify that the server is Elasticsearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') + t.equal(warning.message, 'The client is unable to verify that the server is OpenSearch due to security privileges on the server side. 
Some functionality may not be compatible if the server is running an unsupported product.') } const requests = [{ @@ -379,7 +379,7 @@ test('500 error', t => { } } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') client.search({ index: 'foo', @@ -416,7 +416,7 @@ test('TimeoutError', t => { if (req.method === 'GET') { t.error(err) } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') } }) @@ -428,7 +428,7 @@ test('TimeoutError', t => { } } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) }) @@ -557,7 +557,7 @@ test('Multiple subsequent calls, with errors', t => { if (req.method === 'GET') { t.error(err) } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') } }) @@ -569,11 +569,11 @@ test('Multiple subsequent calls, with errors', t => { } } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) client.ping((err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) client.index({ @@ -582,7 +582,7 
@@ test('Multiple subsequent calls, with errors', t => { foo: 'bar' } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) }) @@ -705,7 +705,7 @@ test('Later errored call', t => { if (req.method === 'GET') { t.error(err) } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') } }) @@ -717,7 +717,7 @@ test('Later errored call', t => { } } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) setTimeout(() => { @@ -729,7 +729,7 @@ test('Later errored call', t => { } } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) }, 100) }) @@ -767,7 +767,7 @@ test('Bad info response', t => { if (req.method === 'GET') { t.error(err) } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') } }) @@ -779,7 +779,7 @@ test('Bad info response', t => { } } }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of Elasticsearch') + t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') }) }) diff --git a/test/acceptance/resurrect.test.js b/test/acceptance/resurrect.test.js index 
023ebb1f5..689c3bea2 100644 --- a/test/acceptance/resurrect.test.js +++ b/test/acceptance/resurrect.test.js @@ -70,7 +70,7 @@ test('Should execute the recurrect API with the ping strategy', t => { t.equal(meta.strategy, 'ping') t.notOk(meta.isAlive) t.equal(meta.connection.id, 'node0') - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') t.same(meta.request, { id: 2 }) }) @@ -130,7 +130,7 @@ test('Resurrect a node and handle 502/3/4 status code', t => { t.error(err) t.equal(meta.strategy, 'ping') t.equal(meta.connection.id, 'node0') - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') t.same(meta.request, { id: idCount++ }) if (count < 4) { t.notOk(meta.isAlive) @@ -193,7 +193,7 @@ test('Should execute the recurrect API with the optimistic strategy', t => { t.equal(meta.strategy, 'optimistic') t.ok(meta.isAlive) t.equal(meta.connection.id, 'node0') - t.equal(meta.name, 'elasticsearch-js') + t.equal(meta.name, 'opensearch-js') t.same(meta.request, { id: 2 }) }) diff --git a/test/benchmarks/macro/complex.bench.js b/test/benchmarks/macro/complex.bench.js index f8c5f9757..c2827d4d3 100644 --- a/test/benchmarks/macro/complex.bench.js +++ b/test/benchmarks/macro/complex.bench.js @@ -40,7 +40,7 @@ const stackoverflowInfo = { } const INDEX = 'stackoverflow' -const node = process.env.ELASTICSEARCH_URL || 'http://localhost:9200' +const node = process.env.OPENSEARCH_URL || 'http://localhost:9200' const client = new Client({ node }) diff --git a/test/benchmarks/macro/simple.bench.js b/test/benchmarks/macro/simple.bench.js index f6d93d650..84f4c1f7d 100644 --- a/test/benchmarks/macro/simple.bench.js +++ b/test/benchmarks/macro/simple.bench.js @@ -22,7 +22,7 @@ const { bench, beforeEach, afterEach } = require('../suite')({ } }) -const node = process.env.ELASTICSEARCH_URL || 'http://localhost:9200' +const node = process.env.OPENSEARCH_URL || 'http://localhost:9200' const smallDocument = require('./fixtures/small_document.json') 
const smallDocumentInfo = { diff --git a/test/benchmarks/suite.js b/test/benchmarks/suite.js index 779700286..80f7b8148 100644 --- a/test/benchmarks/suite.js +++ b/test/benchmarks/suite.js @@ -113,11 +113,11 @@ function buildBenchmark (options = {}) { // still need to warmup if (warmup-- > 0) { process.nextTick(run) - // save the actual measure + // save the actual measure } else if (measure-- > 0) { stats[title].push(convertHrtime(b.time)) process.nextTick(run) - // calculate the statistics + // calculate the statistics } else { done() } @@ -192,8 +192,8 @@ function buildBenchmark (options = {}) { const git = Git(__dirname) const commit = await git.log(['-1']) const branch = await git.revparse(['--abbrev-ref', 'HEAD']) - const { body: esInfo } = await client.info() - const { body: esNodes } = await client.nodes.stats({ metric: 'os' }) + const { body: osInfo } = await client.info() + const { body: osNodes } = await client.nodes.stats({ metric: 'os' }) const results = reports.map(report => { return { @@ -213,12 +213,12 @@ function buildBenchmark (options = {}) { }, agent: { version: clientVersion, - name: '@elastic/elasticsearch-js', + name: '@opensearch/opensearch-js', git: { branch: branch.slice(0, -1), sha: commit.latest.hash, commit_message: commit.latest.message, - repository: 'elasticsearch-js' + repository: 'opensearch-js' }, language: { version: process.version @@ -230,8 +230,8 @@ function buildBenchmark (options = {}) { } }, server: { - version: esInfo.version.number, - nodes_info: esNodes + version: osInfo.version.number, + nodes_info: osNodes } } }) diff --git a/test/fixtures/stackoverflow.ndjson b/test/fixtures/stackoverflow.ndjson index 3872524cd..bb6883bc0 100644 --- a/test/fixtures/stackoverflow.ndjson +++ b/test/fixtures/stackoverflow.ndjson @@ -166,7 +166,7 @@ {"id":"10226378","title":"Cakephp 2.1 Form Error","body":"\u003cp\u003eI have just started using cakePHP 2.1. After submiting a form. 
If there is a validation error how to check the params whether there is an error ?\u003c/p\u003e\n\n\u003cp\u003eBefore we used to do something like \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$this-\u0026gt;data['params'];\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"10227397","answer_count":"2","comment_count":"1","creation_date":"2012-04-19 10:43:32.037 UTC","last_activity_date":"2012-04-20 06:46:49.393 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"155196","post_type_id":"1","score":"0","tags":"cakephp|cakephp-2.1","view_count":"317"} {"id":"3809811","title":"How to find Gmail mailboxes in other languages","body":"\u003cp\u003eI need to select the 'Sent' and 'Spam' folders in Gmail, but I have an account that does not work. I've found out later that the folders are in non-English language, but it doesn't seem to represent the unicode characters of those language either. Does anybody know how to find out which one is the 'Sent' or 'Spam' folder of a Gmail account?\u003c/p\u003e","accepted_answer_id":"3846248","answer_count":"3","comment_count":"0","creation_date":"2010-09-28 05:03:00.727 UTC","favorite_count":"1","last_activity_date":"2017-04-17 06:57:34.94 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"188912","post_type_id":"1","score":"3","tags":"gmail|imap","view_count":"945"} {"id":"10272924","title":"How to fix this java.util.Scanner.next, throwing java.util.NoSuchElementException","body":"\u003cp\u003eI am filtering new lines but first time it worked on second loop its failing for \u003ccode\u003eline 2\u003c/code\u003e filering.\u003c/p\u003e\n\n\u003cp\u003eException:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erun:\na[line 1]l[0]\nException in thread \"main\" java.util.NoSuchElementException\na[line 2]l[1]\na[line 3]l[2]\nb[line 1]l[0]\nb[line 3]l[1]\n at java.util.Scanner.throwFor(Scanner.java:855)\n at java.util.Scanner.next(Scanner.java:1364)\n at 
ui.Test.main(Test.java:82)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eCode:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// a) Lines\nString a = \"line 1\\n\\r\" +\n \"line 2\\n\\r\" +\n \"line 3\\n\\r\"; \n// b) Total length \nint len = 0; \nScanner sc = new Scanner(a).useDelimiter(\"\\r?\\n\\r|\\\\|\");\nwhile (sc.hasNext()) {\n System.out.println(\"a[\" + sc.next() + \"]l[\" + len + \"]\" ); \n len++;\n}\n\n// c) Prepare array \nString[] value;\nvalue = new String[len+1];\nlen = 0; \nsc = new Scanner(a).useDelimiter(\"\\r?\\n\\r|\\\\|\");\nwhile (sc.hasNext()) {\n System.out.println(\"b[\" + sc.next() + \"]l[\" + len + \"]\" ); \n value[len] = sc.next();\n len++;\n}\n\n// d) Goal - use the value for JComboBox\nSystem.out.println(value);\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"10272942","answer_count":"1","comment_count":"1","creation_date":"2012-04-22 23:02:41.63 UTC","last_activity_date":"2012-04-22 23:06:22.563 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"285594","post_type_id":"1","score":"1","tags":"java|java.util.scanner|jcombobox","view_count":"2194"} -{"id":"34708996","title":"Servlet add dynamically?","body":"\u003cp\u003eI have the following request:\u003c/p\u003e\n\n\u003cp\u003eI want to creat with follow request(POST)\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elocalhost:8080/Anwendung/bla?createServlet=machine\u0026amp;input=2\u0026amp;name\u0026amp;state\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ea new „request possibility“ After i executed the above URL i can add with follow POST\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elocalhost:8080/Anwendung/machine?name =\u0026amp;state=off\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ea new machine in a database. 
Now i can get with a GET request:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elocalhost:8080/Anwendung/machine?name\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe machine as JSON File.\u003c/p\u003e\n\n\u003cp\u003eMy first idea is that i make for every „request possibility“ a new servlet, which i add dynamically at the runtime. After i made the above example i have „machine.java“, which i compiling and add(hot deployment(??)) in my server(tomcat).\nIt es possible? \nI think there is a better possibility\u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2016-01-10 18:10:08.243 UTC","favorite_count":"0","last_activity_date":"2016-01-10 18:10:08.243 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5770509","post_type_id":"1","score":"1","tags":"http|servlets|dynamically-generated","view_count":"22"} +{"id":"34708996","title":"Servlet add dynamically?","body":"\u003cp\u003eI have the following request:\u003c/p\u003e\n\n\u003cp\u003eI want to creat with follow request(POST)\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elocalhost:8080/Anwendung/bla?createServlet=machine\u0026amp;input=2\u0026amp;name\u0026amp;state\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ea new „request possibility“ After i executed the above URL i can add with follow POST\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elocalhost:8080/Anwendung/machine?name =\u0026amp;state=off\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ea new machine in a database. Now i can get with a GET request:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elocalhost:8080/Anwendung/machine?name\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe machine as JSON File.\u003c/p\u003e\n\n\u003cp\u003eMy first idea is that i make for every „request possibility“ a new servlet, which i add dynamically at the runtime. 
After i made the above example i have „machine.java“, which i compiling and add(hot deployment(??)) in my server(tomcat).\nIt opensearch possible? \nI think there is a better possibility\u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2016-01-10 18:10:08.243 UTC","favorite_count":"0","last_activity_date":"2016-01-10 18:10:08.243 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5770509","post_type_id":"1","score":"1","tags":"http|servlets|dynamically-generated","view_count":"22"} {"id":"136129","title":"Windows Forms: How do you change the font color for a disabled label","body":"\u003cp\u003eI am trying to set the disabled font characteristics for a Label Control. I can set all of the Font characteristics (size, bold, etc), but the color is overridden by the default windows behavior which seems to be one of these two colors:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eIf background color is transparent then ForeColor is same as TextBox disabled Color.\u003c/li\u003e\n\u003cli\u003eIf background color is set to anything else, ForeColor is a Dark Gray color.\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eThe image below demonstrates the behavior -- Column 1 is Labels, Column 2 is TextBoxs, and Column 3 is ComboBoxes.\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"http://highplainstech.com/images/testForm.png\"\u003ealt text http://highplainstech.com/images/testForm.png\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eEdit -- Explaining the image: The first two rows are default styles for a label, textbox, and combobox. In the second two rows, I set the Background color to Red and Foreground to White. 
The disabled font style handling by Microsoft is inconsistent.\u003c/p\u003e","accepted_answer_id":"136265","answer_count":"7","comment_count":"0","creation_date":"2008-09-25 21:05:01.597 UTC","favorite_count":"3","last_activity_date":"2013-06-19 16:31:59.783 UTC","last_edit_date":"2008-09-26 14:06:36.077 UTC","last_editor_display_name":"mistrmark","last_editor_user_id":"19242","owner_display_name":"mistrmark","owner_user_id":"19242","post_type_id":"1","score":"11","tags":"winforms","view_count":"19391"} {"id":"17466467","title":"How to check with java, if a specific XML Element are existing in a XML Document","body":"\u003cp\u003eI have the following question:\u003c/p\u003e\n\n\u003cp\u003eI would like to check, whether a XML document contains a specific XML element. Is it possible to check, for example with a java method of a specific API, which returns a boolean value, wheter a specific XML element are available in a XML document?\u003c/p\u003e\n\n\u003cp\u003eThis is my XML document as example:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;Test xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\u0026gt;\n \u0026lt;ServiceRequest\u0026gt; \n \u0026lt;RequestPayload\u0026gt;\n \u0026lt;LocationInformationRequest\u0026gt;\n \u0026lt;InitialInput\u0026gt;\n \u0026lt;GeoRestriction\u0026gt;\n \u0026lt;Area\u0026gt;\n \u0026lt;PolylinePoint\u0026gt;\n \u0026lt;Longitude\u0026gt;11.0\u0026lt;/Longitude\u0026gt;\n \u0026lt;Latitude\u0026gt;12.0\u0026lt;/Latitude\u0026gt;\n \u0026lt;Altitude\u0026gt;13.0\u0026lt;/Altitude\u0026gt;\n \u0026lt;/PolylinePoint\u0026gt; \n \u0026lt;/Area\u0026gt;\n \u0026lt;/GeoRestriction\u0026gt;\n \u0026lt;/InitialInput\u0026gt;\n \u0026lt;/LocationInformationRequest\u0026gt;\n \u0026lt;/RequestPayload\u0026gt;\n \u0026lt;/ServiceRequest\u0026gt;\n\u0026lt;/Test\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI need the information as a boolean value, wheter the XML element Area are existing or not existing. 
The XML Document is used in my own java classes as a type of string.\u003c/p\u003e\n\n\u003cp\u003eThanks for help !\u003c/p\u003e","answer_count":"5","comment_count":"0","creation_date":"2013-07-04 09:16:53.223 UTC","favorite_count":"3","last_activity_date":"2013-07-08 12:51:18.5 UTC","last_edit_date":"2013-07-04 09:26:48.4 UTC","last_editor_display_name":"","last_editor_user_id":"831531","owner_display_name":"","owner_user_id":"2504767","post_type_id":"1","score":"8","tags":"java|xml","view_count":"18620"} {"id":"17939694","title":"Using Valgrind on an Embedded project","body":"\u003cp\u003eCurrently I am working on an Embedded Project. I am using IAR Embedded Workbench IDE and target platform is 8051-based microcontroller. Is it possible to use Valgrind tool to check the code I wrote?\u003c/p\u003e","accepted_answer_id":"17983567","answer_count":"2","comment_count":"3","creation_date":"2013-07-30 06:24:26.523 UTC","favorite_count":"1","last_activity_date":"2013-08-01 01:16:35.527 UTC","last_edit_date":"2013-07-30 08:34:16.61 UTC","last_editor_display_name":"","last_editor_user_id":"1961634","owner_display_name":"","owner_user_id":"720239","post_type_id":"1","score":"5","tags":"embedded|valgrind","view_count":"2271"} @@ -328,7 +328,7 @@ {"id":"22054301","title":"How to properly index MongoDB queries with multiple $and and $or statements","body":"\u003cp\u003eI have a collection in MongoDB (app_logins) that hold documents with the following structure:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_id\" : \"c8535f1bd2404589be419d0123a569de\"\n \"app\" : \"MyAppName\",\n \"start\" : ISODate(\"2014-02-26T14:00:03.754Z\"),\n \"end\" : ISODate(\"2014-02-26T15:11:45.558Z\")\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSince the documentation says that the queries in an $or can be executed in parallel and can use separate indices, and I assume the same holds true for $and, I added the following 
indices:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edb.app_logins.ensureIndex({app:1})\ndb.app_logins.ensureIndex({start:1})\ndb.app_logins.ensureIndex({end:1})\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut when I do a query like this, way too many documents are scanned:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edb.app_logins.find(\n{\n $and:[\n { app : \"MyAppName\" },\n {\n $or:[\n {\n $and:[\n { start : { $gte:new Date(1393425621000) }},\n { start : { $lte:new Date(1393425639875) }}\n ]\n },\n {\n $and:[\n { end : { $gte:new Date(1393425621000) }},\n { end : { $lte:new Date(1393425639875) }}\n ]\n },\n {\n $and:[\n { start : { $lte:new Date(1393425639875) }},\n { end : { $gte:new Date(1393425621000) }}\n ]\n }\n ]\n }\n ]\n}\n).explain()\n\n{\n \"cursor\" : \"BtreeCursor app_1\",\n \"isMultiKey\" : true,\n \"n\" : 138,\n \"nscannedObjects\" : 10716598,\n \"nscanned\" : 10716598,\n \"nscannedObjectsAllPlans\" : 10716598,\n \"nscannedAllPlans\" : 10716598,\n \"scanAndOrder\" : false,\n \"indexOnly\" : false,\n \"nYields\" : 30658,\n \"nChunkSkips\" : 0,\n \"millis\" : 38330,\n \"indexBounds\" : {\n \"app\" : [\n [\n \"MyAppName\",\n \"MyAppName\"\n ]\n ]\n },\n \"server\" : \"127.0.0.1:27017\"\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI know that this can be caused because 10716598 match the 'app' field, but the other query can return a much smaller subset.\u003c/p\u003e\n\n\u003cp\u003eIs there any way I can optimize this? The aggregation framework comes to mind, but I was thinking that there may be a better way to optimize this, possibly using indexes.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEdit:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eLooks like if I add an index on app-start-end, as Josh suggested, I am getting better results. 
I am not sure if I can optimize this further this way, but the results are much better:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"cursor\" : \"BtreeCursor app_1_start_1_end_1\",\n \"isMultiKey\" : false,\n \"n\" : 138,\n \"nscannedObjects\" : 138,\n \"nscanned\" : 8279154,\n \"nscannedObjectsAllPlans\" : 138,\n \"nscannedAllPlans\" : 8279154,\n \"scanAndOrder\" : false,\n \"indexOnly\" : false,\n \"nYields\" : 2934,\n \"nChunkSkips\" : 0,\n \"millis\" : 13539,\n \"indexBounds\" : {\n \"app\" : [\n [\n \"MyAppName\",\n \"MyAppName\"\n ]\n ],\n \"start\" : [\n [\n {\n \"$minElement\" : 1\n },\n {\n \"$maxElement\" : 1\n }\n ]\n ],\n \"end\" : [\n [\n {\n \"$minElement\" : 1\n },\n {\n \"$maxElement\" : 1\n }\n ]\n ]\n },\n \"server\" : \"127.0.0.1:27017\"\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"22054871","answer_count":"2","comment_count":"3","creation_date":"2014-02-26 21:59:35.957 UTC","last_activity_date":"2014-02-27 03:47:40.007 UTC","last_edit_date":"2014-02-26 22:17:31.547 UTC","last_editor_display_name":"","last_editor_user_id":"333918","owner_display_name":"","owner_user_id":"333918","post_type_id":"1","score":"0","tags":"mongodb","view_count":"105"} {"id":"45313360","title":"getDisplayName() and getPhotoUrl() without signing in user to android firebase","body":"\u003cp\u003eI am creating a login page in android using Firebase Email Password Authentication and I want when a user enters its Email address and shift to password the system automatically get the PhotoUrl and DisplayName and display on the Login page Before a user enters His Full Password.\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2017-07-25 20:58:38.357 UTC","last_activity_date":"2017-07-25 22:02:32.61 UTC","last_edit_date":"2017-07-25 21:56:59.87 
UTC","last_editor_display_name":"","last_editor_user_id":"5246885","owner_display_name":"","owner_user_id":"8366127","post_type_id":"1","score":"0","tags":"android|firebase|firebase-authentication|firebase-storage","view_count":"119"} {"id":"44604596","title":"Need to open permission to 766 to let PHP to edit files","body":"\u003cp\u003eAs the title described, I have tried making the file which I need to edit permission to 764 and it didn't work.\u003cbr\u003e\nI don't have permission to the php config and other main configuration, do I have any options other than using 766?\nAdditionally, will 766 let other to edit my file over HTTP?\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2017-06-17 12:22:37.48 UTC","last_activity_date":"2017-06-17 13:47:27.22 UTC","last_edit_date":"2017-06-17 13:47:27.22 UTC","last_editor_display_name":"","last_editor_user_id":"7901773","owner_display_name":"","owner_user_id":"7901773","post_type_id":"1","score":"1","tags":"php|apache|file-permissions","view_count":"43"} -{"id":"34293411","title":"Elasticsearch parent - child mapping: Search in both and highlight","body":"\u003cp\u003eI have the following elasticsearch 1.6.2 index mappings: parent \u003cstrong\u003eitem\u003c/strong\u003e and child \u003cstrong\u003edocument\u003c/strong\u003e. One item can have several documents. 
Documents are \u003cstrong\u003enot\u003c/strong\u003e nested because they contain base64 data (mapper-attachments-plugin) and cannot be updated with an item.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"mappings\" : {\n \"document\" : {\n \"_parent\" : {\n \"type\" : \"item\"\n }, \n \"_routing\" : {\n \"required\" : true\n },\n \"properties\" : {\n \"extension\" : {\n \"type\" : \"string\",\n \"term_vector\" : \"with_positions_offsets\", \n \"include_in_all\" : true\n }, ...\n },\n }\n \"item\" : { \n \"properties\" : {\n \"prop1\" : {\n \"type\" : \"string\",\n \"include_in_all\" : true\n }, ...\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI like to search in \u003cstrong\u003eboth\u003c/strong\u003e indices but always return \u003cstrong\u003eitems\u003c/strong\u003e. If there is a match in an document, return the corresponding item. If there is a match in an item, return the item. If both is true, return the item. \u003c/p\u003e\n\n\u003cp\u003eIs it possible to combine \u003cstrong\u003ehas_child\u003c/strong\u003e and \u003cstrong\u003ehas_parent\u003c/strong\u003e searches?\u003c/p\u003e\n\n\u003cp\u003eThis search only searches in documents and returns items:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": {\n \"has_child\": {\n \"type\": \"document\",\n \"query\": {\n \"query_string\":{\"query\":\"her*}\n },\n \"inner_hits\" : {\n \"highlight\" : {\n \"fields\" : {\n \"*\" : {} \n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eEXAMPLE\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eGET index/item/174\n{\n \"_type\" : \"item\",\n \"_id\" : \"174\",\n \"_source\":{\"prop1\":\"Perjeta construction\"}\n}\n\nGET index/document/116\n{\n \"_type\" : \"document\",\n \"_id\" : \"116\", \n \"_source\":{\"extension\":\"pdf\",\"item\": {\"id\":174},\"fileName\":\"construction plan\"}\n} \n\n__POSSIBLE SEARCH RESULT searching for \"constr*\"__\n\n{\n\"hits\": {\n \"total\": 1,\n 
\"hits\": [\n {\n \"_type\": \"item\",\n \"_id\": \"174\",\n \"_source\": {\n \"prop1\": \"Perjeta construction\"\n },\n \"highlight\": {\n \"prop1\": [\n \"Perjeta \u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt;\"\n ]\n },\n \"inner_hits\": {\n \"document\": {\n \"hits\": {\n \"hits\": [\n {\n \"_type\": \"document\",\n \"_id\": \"116\",\n \"_source\": {\n \"extension\": \"pdf\",\n \"item\": {\n \"id\": 174\n }, \n \"fileName\": \"construction plan\"\n },\n \"highlight\": {\n \"fileName\": [\n \"\u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt; plan\"\n ]\n }\n }\n ]\n }\n }\n }\n }\n ]\n}\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"35230048","answer_count":"1","comment_count":"1","creation_date":"2015-12-15 15:44:25.833 UTC","last_activity_date":"2016-02-05 17:28:08.867 UTC","last_edit_date":"2015-12-15 16:12:35.24 UTC","last_editor_display_name":"","last_editor_user_id":"1056504","owner_display_name":"","owner_user_id":"1056504","post_type_id":"1","score":"0","tags":"elasticsearch","view_count":"181"} +{"id":"34293411","title":"OpenSearch parent - child mapping: Search in both and highlight","body":"\u003cp\u003eI have the following opensearch 1.6.2 index mappings: parent \u003cstrong\u003eitem\u003c/strong\u003e and child \u003cstrong\u003edocument\u003c/strong\u003e. One item can have several documents. 
Documents are \u003cstrong\u003enot\u003c/strong\u003e nested because they contain base64 data (mapper-attachments-plugin) and cannot be updated with an item.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"mappings\" : {\n \"document\" : {\n \"_parent\" : {\n \"type\" : \"item\"\n }, \n \"_routing\" : {\n \"required\" : true\n },\n \"properties\" : {\n \"extension\" : {\n \"type\" : \"string\",\n \"term_vector\" : \"with_positions_offsets\", \n \"include_in_all\" : true\n }, ...\n },\n }\n \"item\" : { \n \"properties\" : {\n \"prop1\" : {\n \"type\" : \"string\",\n \"include_in_all\" : true\n }, ...\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI like to search in \u003cstrong\u003eboth\u003c/strong\u003e indices but always return \u003cstrong\u003eitems\u003c/strong\u003e. If there is a match in an document, return the corresponding item. If there is a match in an item, return the item. If both is true, return the item. \u003c/p\u003e\n\n\u003cp\u003eIs it possible to combine \u003cstrong\u003ehas_child\u003c/strong\u003e and \u003cstrong\u003ehas_parent\u003c/strong\u003e searches?\u003c/p\u003e\n\n\u003cp\u003eThis search only searches in documents and returns items:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": {\n \"has_child\": {\n \"type\": \"document\",\n \"query\": {\n \"query_string\":{\"query\":\"her*}\n },\n \"inner_hits\" : {\n \"highlight\" : {\n \"fields\" : {\n \"*\" : {} \n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eEXAMPLE\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eGET index/item/174\n{\n \"_type\" : \"item\",\n \"_id\" : \"174\",\n \"_source\":{\"prop1\":\"Perjeta construction\"}\n}\n\nGET index/document/116\n{\n \"_type\" : \"document\",\n \"_id\" : \"116\", \n \"_source\":{\"extension\":\"pdf\",\"item\": {\"id\":174},\"fileName\":\"construction plan\"}\n} \n\n__POSSIBLE SEARCH RESULT searching for \"constr*\"__\n\n{\n\"hits\": {\n \"total\": 1,\n 
\"hits\": [\n {\n \"_type\": \"item\",\n \"_id\": \"174\",\n \"_source\": {\n \"prop1\": \"Perjeta construction\"\n },\n \"highlight\": {\n \"prop1\": [\n \"Perjeta \u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt;\"\n ]\n },\n \"inner_hits\": {\n \"document\": {\n \"hits\": {\n \"hits\": [\n {\n \"_type\": \"document\",\n \"_id\": \"116\",\n \"_source\": {\n \"extension\": \"pdf\",\n \"item\": {\n \"id\": 174\n }, \n \"fileName\": \"construction plan\"\n },\n \"highlight\": {\n \"fileName\": [\n \"\u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt; plan\"\n ]\n }\n }\n ]\n }\n }\n }\n }\n ]\n}\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"35230048","answer_count":"1","comment_count":"1","creation_date":"2015-12-15 15:44:25.833 UTC","last_activity_date":"2016-02-05 17:28:08.867 UTC","last_edit_date":"2015-12-15 16:12:35.24 UTC","last_editor_display_name":"","last_editor_user_id":"1056504","owner_display_name":"","owner_user_id":"1056504","post_type_id":"1","score":"0","tags":"opensearch","view_count":"181"} {"id":"12093896","title":"Taking a picture and then emailing it","body":"\u003cp\u003eI am trying to create an application where you can take a picture and then email it to someone. 
At the moment I can take a picture and set my background as this picture:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class Camera extends Activity implements View.OnClickListener{\n\n\nImageButton ib;\nButton b;\nImageView iv;\nIntent i;\nfinal static int cameraData = 0;\nBitmap bmp;\n\n@Override\nprotected void onCreate(Bundle savedInstanceState) {\n // TODO Auto-generated method stub\n super.onCreate(savedInstanceState);\n setContentView(R.layout.photo);\n initialize();\n InputStream is = getResources().openRawResource(R.drawable.ic_launcher);\n bmp = BitmapFactory.decodeStream(is);\n}\n\nprivate void initialize(){\n ib = (ImageButton) findViewById(R.id.ibTakePic);\n b = (Button) findViewById(R.id.bSetWall);\n iv = (ImageView) findViewById(R.id.ivReturnedPic);\n b.setOnClickListener(this);\n ib.setOnClickListener(this);\n\n\n\n}\n\n@Override\npublic void onClick(View v) {\n File mImageFile;\n // TODO Auto-generated method stub\n switch(v.getId()){\n case R.id.bSetWall:\n try {\n getApplicationContext().setWallpaper(bmp);\n } catch (IOException e) {\n // TODO Auto-generated catch block\n e.printStackTrace();\n }\n\n break;\n case R.id.ibTakePic:\n i = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);\n startActivityForResult(i, cameraData);\n break;\n }\n\n}\n\n@Override\nprotected void onActivityResult(int requestCode, int resultCode, Intent data) {\n // TODO Auto-generated method stub\n super.onActivityResult(requestCode, resultCode, data);\n if(resultCode == RESULT_OK){\n Bundle extras = data.getExtras();\n bmp = (Bitmap)extras.get(\"data\");\n iv.setImageBitmap(bmp);\n }\n}\n\n\n\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI have a separate application where I can take in user input and email it to a predefined address:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e public void onClick(View v) {\n // TODO Auto-generated method stub\n convertEditTextVarsIntoStringsAndYesThisIsAMethodWeCreated();\n String emailaddress[] = { 
\"info@sklep.com\", \"\", };\n String message = emailAdd + name + beginning;\n\n Intent emailIntent = new Intent(android.content.Intent.ACTION_SEND);\n emailIntent.putExtra(android.content.Intent.EXTRA_EMAIL, emailaddress);\n\n emailIntent.setType(\"plain/text\");\n emailIntent.putExtra(android.content.Intent.EXTRA_TEXT, message);\n startActivity(emailIntent);\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow do I go about emailing the picture that I have taken? Where is it saved and how do I access it so that I can email it?\u003c/p\u003e\n\n\u003cp\u003eMany Thanks\u003c/p\u003e","accepted_answer_id":"12094137","answer_count":"2","comment_count":"0","creation_date":"2012-08-23 14:25:13.293 UTC","last_activity_date":"2013-11-08 05:45:17.12 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"876343","post_type_id":"1","score":"0","tags":"android|android-intent|android-camera","view_count":"143"} {"id":"29457499","title":"In open cv, how can i convert gray scale image back in to RGB image(color)","body":"\u003cp\u003eIn open cv to remove background, using current frame and former frame, i applied absdiff function and created a difference image in gray scale. However, i would like to covert the gray scale image back in to RGB with actual color of the image, but i have no idea how to operate this back in.\nI'm using C++.\nCould any one knowledgeable of open cv help me?\u003c/p\u003e","answer_count":"1","comment_count":"5","creation_date":"2015-04-05 12:55:45.5 UTC","last_activity_date":"2015-04-05 15:25:41.797 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3455085","post_type_id":"1","score":"0","tags":"c++|image|opencv","view_count":"242"} {"id":"10008551","title":"How to write test cases for assignment","body":"\u003cp\u003eThe part of my assignment is to create tests for each function. This ones kinda long but I am so confused. 
I put a link below this function so you can see how it looks like\u003cbr\u003e\nfirst code is extremely long because.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edef load_profiles(profiles_file, person_to_friends, person_to_networks):\n '''(file, dict of {str : list of strs}, dict of {str : list of strs}) -\u0026gt; NoneType\n Update person to friends and person to networks dictionaries to include\n the data in open file.'''\n\n # for updating person_to_friends dict\n update_p_to_f(profiles_file, person_to_friends)\n update_p_to_n(profiles_file, person_to_networks)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eheres the whole code: \u003ca href=\"http://shrib.com/8EF4E8Z3\" rel=\"nofollow\"\u003ehttp://shrib.com/8EF4E8Z3\u003c/a\u003e, I tested it through mainblock and it works. \nThis is the text file(profiles_file) we were provided that we are using to convert them :\n\u003ca href=\"http://shrib.com/zI61fmNP\" rel=\"nofollow\"\u003ehttp://shrib.com/zI61fmNP\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eHow do I run test cases for this through nose, what kinda of test outcomes are there? Or am I not being specific enough? \u003c/p\u003e\n\n\u003cp\u003eimport nose\nimport a3_functions\u003c/p\u003e\n\n\u003cp\u003edef test_load_profiles_\u003c/p\u003e\n\n\u003cp\u003eif \u003cstrong\u003ename\u003c/strong\u003e == '\u003cstrong\u003emain\u003c/strong\u003e':\n nose.runmodule()\nI went that far then I didn't know what I can test for the function. 
\u003c/p\u003e","answer_count":"2","comment_count":"8","creation_date":"2012-04-04 09:36:30.373 UTC","last_activity_date":"2012-10-03 05:32:10.363 UTC","last_edit_date":"2012-10-03 05:32:10.363 UTC","last_editor_display_name":"","last_editor_user_id":"1118932","owner_display_name":"","owner_user_id":"1172182","post_type_id":"1","score":"-2","tags":"python|testing","view_count":"544"} @@ -728,7 +728,7 @@ {"id":"41901878","title":"From which api should i ask permissions?","body":"\u003cp\u003eHey I am trying to ask for storage access permission and I notice that in my other phone which has android 5.0 the permission ask crashes the app. what should I do to ask permission without crashing the app in this android version and from which android version should I do it?\u003c/p\u003e\n\n\u003cp\u003ethis is the code for asking permission:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e int MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE = 0;\n if (checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE)\n != PackageManager.PERMISSION_GRANTED) {\n\n // Should we show an explanation?\n if (shouldShowRequestPermissionRationale(\n Manifest.permission.READ_EXTERNAL_STORAGE)) {\n // Explain to the user why we need to read the contacts\n }\n\n requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},\n MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE);\n\n // MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE is an\n // app-defined int constant that should be quite unique\n\n return;\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"2","creation_date":"2017-01-27 19:46:49.193 UTC","last_activity_date":"2017-01-27 19:56:55.73 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"7415791","post_type_id":"1","score":"-2","tags":"java|android|api|permissions|android-permissions","view_count":"38"} {"id":"25708004","title":"Looping over descending values in Stata using forvalues","body":"\u003cp\u003eSo this works as 
expected:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e. forvalues i = 1(1)3 {\n 2. di `i'\n 3. }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003e1\u003cbr\u003e\n 2\u003cbr\u003e\n 3\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eAnd this doesn't:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e. forvalues i = 3(1)1 {\n 2. di `i'\n 3. }\n \u0026lt;--- that's an empty line that returns from the above loop.\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIf I want to produce \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e3\n2\n1\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003edo I really need to get this belabored?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e. forvalues i = 1(1)3 {\n 2. di 3+1-`i'\n 3. }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhy?\u003c/p\u003e","accepted_answer_id":"25708275","answer_count":"1","comment_count":"4","creation_date":"2014-09-07 07:21:08.937 UTC","favorite_count":"1","last_activity_date":"2014-09-07 08:13:05.737 UTC","last_edit_date":"2014-09-07 08:13:05.737 UTC","last_editor_display_name":"","last_editor_user_id":"1820446","owner_display_name":"","owner_user_id":"3133336","post_type_id":"1","score":"3","tags":"stata","view_count":"1742"} {"id":"34081370","title":"GitHub API: finding number of stars,commits and releases","body":"\u003cp\u003eI'm making Android app that use GitHud API. The target -- make list with repositories info of githubuser. I use GET /users/:username/repos. But there no all info I need. Maybe someone know how can I get this info for the list of repositories.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2015-12-04 05:17:19.56 UTC","last_activity_date":"2015-12-04 08:58:36.8 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5637732","post_type_id":"1","score":"0","tags":"android|json|github-api","view_count":"107"} -{"id":"21790023","title":"Navigation Timing API. 
What's going on between domContentLoadedEventStart and domContentLoadedEventEnd?","body":"\u003cp\u003eW3C specifies a list of event and their corresponding timings that user agents must return if they want to support the \u003ca href=\"https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/NavigationTiming/Overview.html\"\u003eNavigation Timing API\u003c/a\u003e. \u003c/p\u003e\n\n\u003cp\u003eA list you can see here: \u003ca href=\"http://www.w3.org/TR/navigation-timing/#process\"\u003ehttp://www.w3.org/TR/navigation-timing/#process\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eUnderstanding which process relates to which events is pretty straight forward in most cases. But one thing that eludes me is what is going on between \u003ccode\u003edomContentLoadedEventStart\u003c/code\u003e and \u003ccode\u003edomContentLoadedEventEnd\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eHere is what I have understood so far and base my reflections on:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003ccode\u003edomLoading\u003c/code\u003e // The UA starts parsing the document.\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003edomInteractive\u003c/code\u003e // The UA has finished parsing the document. Users\ncan interact with the page.\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003edomContentLoaded\u003c/code\u003e // The document has been completely loaded and\nparsed and deferred scripts, if any, have executed. (Async scripts,\nif any, might or might not have executed???)\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003edomComplete\u003c/code\u003e // The DOM Tree is completely built. Async scripts, if\nany, have executed.\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003eloadEventEnd\u003c/code\u003e // The UA has a fully completed page. 
All resources,\nlike images, swf, etc, have loaded.\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eOne should be able to deduce what happens after phase #3 (\u003ccode\u003edomContentLoaded\u003c/code\u003e) by understanding what triggered event #4 (\u003ccode\u003edomComplete\u003c/code\u003e) but did not trigger previous events. \u003c/p\u003e\n\n\u003cp\u003eSo one would think that “Async scripts, if any, have executed” means that asynchronous scripts get executed after phase #3 but before event #4. But according to my tests, this is not what happens, unless my test is wrong. (I tried to replicate my test on \u003ccode\u003eJSFiddle\u003c/code\u003e, but I can’t make the defered/async script work since there is no way to add attribute on external scripts.)\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eSo my question is: What process(es) takes place between \u003ccode\u003edomContentLoadedEventStart\u003c/code\u003e and \u003ccode\u003edomContentLoadedEventEnd\u003c/code\u003e?\u003c/strong\u003e\u003c/p\u003e","accepted_answer_id":"22026449","answer_count":"1","comment_count":"1","creation_date":"2014-02-14 21:42:41.077 UTC","favorite_count":"3","last_activity_date":"2014-02-25 21:19:35.497 UTC","last_edit_date":"2014-02-21 22:37:16.687 UTC","last_editor_display_name":"","last_editor_user_id":"369759","owner_display_name":"","owner_user_id":"2533008","post_type_id":"1","score":"11","tags":"javascript|dom","view_count":"754"} +{"id":"21790023","title":"Navigation Timing API. What's going on between domContentLoadedEventStart and domContentLoadedEventEnd?","body":"\u003cp\u003eW3C specifies a list of event and their corresponding timings that user agents must return if they want to support the \u003ca href=\"https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/NavigationTiming/Overview.html\"\u003eNavigation Timing API\u003c/a\u003e. 
\u003c/p\u003e\n\n\u003cp\u003eA list you can see here: \u003ca href=\"http://www.w3.org/TR/navigation-timing/#process\"\u003ehttp://www.w3.org/TR/navigation-timing/#process\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eUnderstanding which process relates to which events is pretty straight forward in most cases. But one thing that eludes me is what is going on between \u003ccode\u003edomContentLoadedEventStart\u003c/code\u003e and \u003ccode\u003edomContentLoadedEventEnd\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eHere is what I have understood so far and base my reflections on:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003ccode\u003edomLoading\u003c/code\u003e // The UA starts parsing the document.\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003edomInteractive\u003c/code\u003e // The UA has finished parsing the document. Users\ncan interact with the page.\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003edomContentLoaded\u003c/code\u003e // The document has been completely loaded and\nparsed and deferred scripts, if any, have executed. (Async scripts,\nif any, might or might not have executed???)\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003edomComplete\u003c/code\u003e // The DOM Tree is completely built. Async scripts, if\nany, have executed.\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003eloadEventEnd\u003c/code\u003e // The UA has a fully completed page. All resources,\nlike images, swf, etc, have loaded.\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eOne should be able to deduce what happens after phase #3 (\u003ccode\u003edomContentLoaded\u003c/code\u003e) by understanding what triggered event #4 (\u003ccode\u003edomComplete\u003c/code\u003e) but did not trigger previous events. \u003c/p\u003e\n\n\u003cp\u003eSo one would think that “Async scripts, if any, have executed” means that asynchronous scripts get executed after phase #3 but before event #4. But according to my tests, this is not what happens, unless my test is wrong. 
(I tried to replicate my test on \u003ccode\u003eJSFiddle\u003c/code\u003e, but I can’t make the defered/async script work since there is no way to add attribute on external scripts.)\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eSo my question is: What process(es) takes place between \u003ccode\u003edomContentLoadedEventStart\u003c/code\u003e and \u003ccode\u003edomContentLoadedEventEnd\u003c/code\u003e?\u003c/strong\u003e\u003c/p\u003e","accepted_answer_id":"22026449","answer_count":"1","comment_count":"1","creation_date":"2014-02-14 21:42:41.077 UTC","favorite_count":"3","last_activity_date":"2014-02-25 21:19:35.497 UTC","last_edit_date":"2014-02-21 22:37:16.687 UTC","last_editor_display_name":"","last_editor_user_id":"369759","owner_display_name":"","owner_user_id":"2533008","post_type_id":"1","score":"11","tags":"javascript|dom","view_count":"754"}
stack=new Stack();\n }\n void add(char lab){\n\n vertexlist[nverts++]=new Vertex(lab);\n }1\n\n\n\n\n};\nint main(){\n\n\n\n\n\n\n\n\n return 0;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere are the compilation errors I am getting:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026gt; 6 IntelliSense: no operator \"=\" matches these\n\u0026gt; operands c:\\users\\datuashvili\\documents\\visual studio\n\u0026gt; 2010\\projects\\dfs\\dfs\\dfs.cpp 76 23 DFS 7 IntelliSense: expected a\n\u0026gt; declaration c:\\users\\datuashvili\\documents\\visual studio\n\u0026gt; 2010\\projects\\dfs\\dfs\\dfs.cpp 77 3 DFS Error 1 error C2864:\n\u0026gt; 'Stack::size' : only static const integral data members can be\n\u0026gt; initialized within a class c:\\users\\datuashvili\\documents\\visual\n\u0026gt; studio 2010\\projects\\dfs\\dfs\\dfs.cpp 8 1 DFS Error 3 error C2864:\n\u0026gt; 'Graph::maxvertex' : only static const integral data members can be\n\u0026gt; initialized within a class c:\\users\\datuashvili\\documents\\visual\n\u0026gt; studio 2010\\projects\\dfs\\dfs\\dfs.cpp 54 1 DFS Error 2 error C2758:\n\u0026gt; 'Stack::size' : must be initialized in constructor base/member\n\u0026gt; initializer list c:\\users\\datuashvili\\documents\\visual studio\n\u0026gt; 2010\\projects\\dfs\\dfs\\dfs.cpp 12 1 DFS Error 4 error C2758:\n\u0026gt; 'Graph::maxvertex' : must be initialized in constructor base/member\n\u0026gt; initializer list c:\\users\\datuashvili\\documents\\visual studio\n\u0026gt; 2010\\projects\\dfs\\dfs\\dfs.cpp 60 1 DFS Error 5 error C2679: binary '='\n\u0026gt; : no operator found which takes a right-hand operand of type 'Vertex\n\u0026gt; *' (or there is no acceptable conversion) c:\\users\\datuashvili\\documents\\visual studio\n\u0026gt; 2010\\projects\\dfs\\dfs\\dfs.cpp 76 1 DFS\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"1","creation_date":"2011-10-16 10:45:37.78 UTC","last_activity_date":"2011-10-16 12:07:03.68 
UTC","last_editor_display_name":"","owner_display_name":"user466441","owner_user_id":"466534","post_type_id":"1","score":"-1","tags":"c++","view_count":"1535"} {"id":"8031761","title":"Adobe air 3.0 ANE for Android, null ExtensionContext?","body":"\u003cp\u003eI've been working with the vibration example from Adobe for Air 3.0's native extensions on Android. \u003c/p\u003e\n\n\u003cp\u003eI have the ANE compiled and the .apk packaged.\u003c/p\u003e\n\n\u003cp\u003eThe problem I'm having is the actionscript library is getting a null ExtensionContext.\u003c/p\u003e\n\n\u003cp\u003eI tried creating the .apk with adt -package -target apk-debug so that I can see the actionscript traces in logcat and that's where I'm finding the null error.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eextContext = ExtensionContext.createExtensionContext(\"com.adobe.Vibration\", null);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eextContext is null and crashes on the following .call() method.\u003c/p\u003e\n\n\u003cp\u003eAll of the source is stock from the examples, I haven't changed anything.\u003c/p\u003e\n\n\u003cp\u003eDoes anyone have any experience with getting one of Adobe's ANE examples working on a windows machine? 
Most of the examples are for Mac.\u003c/p\u003e","accepted_answer_id":"8100138","answer_count":"3","comment_count":"3","creation_date":"2011-11-07 01:08:48.823 UTC","last_activity_date":"2013-10-07 15:30:31.38 UTC","last_edit_date":"2012-01-23 16:31:43.183 UTC","last_editor_display_name":"","last_editor_user_id":"757154","owner_display_name":"","owner_user_id":"751231","post_type_id":"1","score":"2","tags":"android|actionscript-3|flex|air|air-native-extension","view_count":"2553"} {"id":"47450597","title":"How to multiply 4 number in assembly language","body":"\u003cp\u003eHow to multiply 4 number in assembly language using 8bit and Shift.\nI am able to multiply 2 but how to multiply 4 number \nEg 2 4 5 6 \u003c/p\u003e\n\n\u003cp\u003ethis is code of two digit multiplication :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[org 0x100]\nmultiplicand: db 13\nmultiplier: db 5\nresult: db 0\nmov cl, 4\nmov bl, [multiplicand]\nmov dl, [multiplier]\ncheckbit: shr dl, 1\njnc skip\nadd [result], bl\nskip: shl bl, 1 \ndec cl \njnz checkbit\nmov ax, 0x4c00\nint 0x21\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"8","creation_date":"2017-11-23 08:19:04.697 UTC","favorite_count":"1","last_activity_date":"2017-11-24 11:34:09.48 UTC","last_edit_date":"2017-11-23 09:00:52.863 UTC","last_editor_display_name":"","last_editor_user_id":"8852829","owner_display_name":"","owner_user_id":"8852829","post_type_id":"1","score":"0","tags":"assembly|bit-manipulation|nasm|multiplication","view_count":"44"} @@ -740,7 +740,7 @@ {"id":"29671231","title":"How to get different languages for current location?","body":"\u003cp\u003eHow to get current location in different language.Do we need to add some libraries?I need to find the current address in arabic language? I think there is google maps in arabic format.Similarily is there any way to get current address in arabic lamguage?? 
\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic static final String GOOGLE_GEOCODER = \"http://maps.googleapis.com/maps/api/geocode/json?latlng=\";\n\npublic static String getAddressFromGPSData(double lat, double longi) {\n HttpRetriever agent = new HttpRetriever();\n String request = GOOGLE_GEOCODER + lat + \",\"\n + longi + \"\u0026amp;sensor=true\";\n // Log.d(\"GeoCoder\", request);\n String response = agent.retrieve(request);\n String formattedAddress = \"\";\n if (response != null) {\n Log.d(\"GeoCoder\", response);\n try {\n JSONObject parentObject = new JSONObject(response);\n JSONArray arrayOfAddressResults = parentObject\n .getJSONArray(\"results\");\n JSONObject addressItem = arrayOfAddressResults.getJSONObject(0);\n formattedAddress = addressItem.getString(\"formatted_address\");\n } catch (JSONException e) {\n\n e.printStackTrace();\n }\n\n }\n\n // Log.d(\"GeoCoder\", response);\n return formattedAddress;\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"0","creation_date":"2015-04-16 09:54:56.607 UTC","last_activity_date":"2015-04-17 04:55:32.097 UTC","last_edit_date":"2015-04-17 04:55:32.097 UTC","last_editor_display_name":"","last_editor_user_id":"4729321","owner_display_name":"","owner_user_id":"4729321","post_type_id":"1","score":"-1","tags":"android|google-maps|reverse-geocoding|street-address","view_count":"813"} {"id":"16258127","title":"Adding Adobe Edge Scripts into Rails Asset Pipeline","body":"\u003cp\u003eThis is likely a simple case of bringing external scripts with dependencies into Rails.\u003c/p\u003e\n\n\u003cp\u003eI'm trying to get my Adobe Edge generated Animations to work within my Rails app and the first step is to include all of Adobe Edge's generated js files, but so far I'm just getting a bunch of \u003cstrong\u003e404 Not Found Errors\u003c/strong\u003e for all of the Edge files I've included in the \u003ccode\u003eApplication.js\u003c/code\u003e file.\u003c/p\u003e\n\n\u003cp\u003eHere's my 
Application.js file\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e//= require jquery\n//= require jquery_ujs\n//= require underscore\n//= require backbone\n//= require california_internet\n//= require hero_edgePreload\n//= require edge_includes/edge.1.5.0.min\n//= require hero_edge\n//= require hero_edgeActions\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere is how Edge's Preloader.js is trying to find some of the files...\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eaLoader=[{load:\"edge_includes/jquery-1.7.1.min.js\"},{load:\"edge_includes/edge.1.5.0.min.js\"},{load:\"hero_edge.js\"},{load:\"hero_edgeActions.js\"}]\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"2","comment_count":"0","creation_date":"2013-04-27 23:56:44.573 UTC","last_activity_date":"2013-11-27 09:30:25.55 UTC","last_edit_date":"2013-09-10 20:18:56.07 UTC","last_editor_display_name":"","last_editor_user_id":"125981","owner_display_name":"","owner_user_id":"1686562","post_type_id":"1","score":"3","tags":"javascript|jquery|ruby-on-rails|asset-pipeline|adobe-edge","view_count":"639"} {"id":"43464173","title":"can't get css style's working on input type tel/number only","body":"\u003cp\u003eI'm having some real problems getting input type=tel or input type=number to work on every mobile device. It looks fine on Samsgung s7, iphone 6, hell even an LG K500K. But won't work on my HTC M9 or a Samsung Note. I've checked versions of androids and it seems fine one is using 6.0.1 and the other is on 6.0. I've tried using both a class and also input[type=tel/number] and even just input on it's own :( i looked around online for hours and couldn't find one person who's experiencing this same incosistency. 
Does anybody have any advice\u003c/p\u003e","answer_count":"0","comment_count":"4","creation_date":"2017-04-18 05:20:16.337 UTC","last_activity_date":"2017-04-18 05:20:16.337 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4123298","post_type_id":"1","score":"0","tags":"android|html|css|numbers|tel","view_count":"77"} -{"id":"34155653","title":"MySQL is not working after resizing EC2 instance","body":"\u003cp\u003eI have configured separate MySQL Production server as EC2- \u003ca href=\"/questions/tagged/m2.xlarge\" class=\"post-tag\" title=\"show questions tagged \u0026#39;m2.xlarge\u0026#39;\" rel=\"tag\"\u003em2.xlarge\u003c/a\u003e. This is ubuntu server.MySQL is working perfectly from 3 years on this instance. \u003cbr/\u003e\u003cbr/\u003e\nBut now I thought of downgrading a server because of less usage. When I stepped ahead to downgrade the server as \u003ca href=\"/questions/tagged/m1.medium\" class=\"post-tag\" title=\"show questions tagged \u0026#39;m1.medium\u0026#39;\" rel=\"tag\"\u003em1.medium\u003c/a\u003e or \u003ca href=\"/questions/tagged/m1.small\" class=\"post-tag\" title=\"show questions tagged \u0026#39;m1.small\u0026#39;\" rel=\"tag\"\u003em1.small\u003c/a\u003e using \u003ca href=\"https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-resize.html\" rel=\"nofollow\"\u003esteps given by AWS\u003c/a\u003e , I was able to resize it and started it with reassigning my original elastic IP, I found MySQL is now connecting from anywhere. \u003cbr/\u003e\u003cbr/\u003e\nI troubleshooted to connect MYSQL after resize as follows.\u003cbr/\u003e\n1. connected with ssh login on same instance and used command-line to connect.\u003cbr/\u003e\n2. Tried from outside(workbench) with same configuration..\u003cbr/\u003e\n3. Checked security groups assigned to instance. those are also in place..\u003cbr/\u003e\n4. 
\u003cstrong\u003eBut when I again resize instance with original (m2.xlarge), It works\u003c/strong\u003e .\u003cbr/\u003e\u003c/p\u003e\n\n\u003cp\u003eStill no success. any thing I missed after resizing process?\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2015-12-08 12:16:05.147 UTC","last_activity_date":"2015-12-08 12:16:05.147 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"584122","post_type_id":"1","score":"0","tags":"mysql|amazon-web-services|ssh|amazon-ec2|aws-ec2","view_count":"66"} +{"id":"34155653","title":"MySQL is not working after resizing EC2 instance","body":"\u003cp\u003eI have configured separate MySQL Production server as EC2- \u003ca href=\"/questions/tagged/m2.xlarge\" class=\"post-tag\" title=\"show questions tagged \u0026#39;m2.xlarge\u0026#39;\" rel=\"tag\"\u003em2.xlarge\u003c/a\u003e. This is ubuntu server.MySQL is working perfectly from 3 years on this instance. \u003cbr/\u003e\u003cbr/\u003e\nBut now I thought of downgrading a server because of less usage. When I stepped ahead to downgrade the server as \u003ca href=\"/questions/tagged/m1.medium\" class=\"post-tag\" title=\"show questions tagged \u0026#39;m1.medium\u0026#39;\" rel=\"tag\"\u003em1.medium\u003c/a\u003e or \u003ca href=\"/questions/tagged/m1.small\" class=\"post-tag\" title=\"show questions tagged \u0026#39;m1.small\u0026#39;\" rel=\"tag\"\u003em1.small\u003c/a\u003e using \u003ca href=\"https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-resize.html\" rel=\"nofollow\"\u003esteps given by AWS\u003c/a\u003e , I was able to resize it and started it with reassigning my original opensearch IP, I found MySQL is now connecting from anywhere. \u003cbr/\u003e\u003cbr/\u003e\nI troubleshooted to connect MYSQL after resize as follows.\u003cbr/\u003e\n1. connected with ssh login on same instance and used command-line to connect.\u003cbr/\u003e\n2. 
Tried from outside(workbench) with same configuration..\u003cbr/\u003e\n3. Checked security groups assigned to instance. those are also in place..\u003cbr/\u003e\n4. \u003cstrong\u003eBut when I again resize instance with original (m2.xlarge), It works\u003c/strong\u003e .\u003cbr/\u003e\u003c/p\u003e\n\n\u003cp\u003eStill no success. any thing I missed after resizing process?\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2015-12-08 12:16:05.147 UTC","last_activity_date":"2015-12-08 12:16:05.147 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"584122","post_type_id":"1","score":"0","tags":"mysql|amazon-web-services|ssh|amazon-ec2|aws-ec2","view_count":"66"} {"id":"38158224","title":"Initializer for conditional binding must have Optional type, not 'String' Variable Error","body":"\u003cp\u003eHello i have array and i want to explode 1 item into it and i want to check if variable not null or null and gives me that error\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eInitializer for conditional binding must have Optional type, not 'String' Variable Error\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eMy codes here.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e var myStringArrctakesf = itemListcomming.components(separatedBy: \",\")\n\n if let commingtel = myStringArrctakesf[11] { \n\n //notnull \n } else {\n\n//null\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI want if myStringArrctakesf[11] is null dont crash app and if not null show variable.\u003c/p\u003e\n\n\u003cp\u003eThanks\u003c/p\u003e","accepted_answer_id":"38159708","answer_count":"1","comment_count":"1","creation_date":"2016-07-02 08:42:27.147 UTC","last_activity_date":"2016-07-02 12:25:02.56 UTC","last_edit_date":"2016-07-02 12:25:02.56 
UTC","last_editor_display_name":"","last_editor_user_id":"2442804","owner_display_name":"","owner_user_id":"5393528","post_type_id":"1","score":"0","tags":"string|variables|swift2|swift3","view_count":"457"} {"id":"17728444","title":"How can I call functions in different orders in PHP?","body":"\u003cp\u003eI am making a way to organize user into groups and need a way to check them. For example I have a method to check gender, which return true if there is no gender imbalance and false if there is. I also have similar methods to check age distribution and to check genders. For each of these methods I have a method which makes an optimized array of groups. \u003c/p\u003e\n\n\u003cp\u003eI.e. i have a method which optimizes groups based on gender. I would only call one of these methods if the respective check returns false. The problem I am running into is that when I optimize the groups based on a specific criterion i.e. gender, there is a chance that the new optimized groups have messed up another check criterion. \u003c/p\u003e\n\n\u003cp\u003eFor example, if I check age, gender and skill level (my third check) and I find that there is an imbalance in age, proceed to optimize the groups with respect to age, then I could potentially mess up gender or skill level distributions. My solution to this problem was that If I could find a way call all variations of the check methods and break if a check all method returned true (all checks return true, all groups have good balances of age, gender and skill level). \u003c/p\u003e\n\n\u003cp\u003eEx: \nLet A, B and C be check methods and optimize_A, optimize_B, optimize_C be the make optimized group methods. I need some way loop through the check methods 3! times (because there are 3 check methods and I need to run ABC, ACB, BAC, BCA, CAB, CBA). 
OR I could do a while loop and break if the method check all () returns true (all checks return true, all have good distributions) and break once I have run the all the combinations of the check methods. \u003c/p\u003e\n\n\u003cp\u003eCould anyone help me with this problem? Please bear in mind I am a novice programmer and have never done anything like this before. Thanks\u003c/p\u003e\n\n\u003cp\u003e\u003cb\u003eEdit\u003c/b\u003e: \u003c/p\u003e\n\n\u003cp\u003eHow could I do something like this JavaScript code snippet in php?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar arr = [check1, check2, check3],\n rand = Math.floor(Math.random() * arr.length),\n func = arr[rand];\nfunc();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere is my attempt:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;?php\n\n$checks_to_run = array('check_gender','check_age','check_skill_level');\n$rand = floor(rand(0, count($checks_to_run) - 1));\n$func = $checks_to_run[$rand];\necho $this-\u0026gt;.$func.($tmp); // $tmp is an array of groups \n\n?\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"17730808","answer_count":"1","comment_count":"4","creation_date":"2013-07-18 15:58:06.237 UTC","favorite_count":"1","last_activity_date":"2013-07-18 18:02:22.663 UTC","last_edit_date":"2013-07-18 17:49:14.993 UTC","last_editor_display_name":"","last_editor_user_id":"2554416","owner_display_name":"","owner_user_id":"2554416","post_type_id":"1","score":"4","tags":"php","view_count":"113"} {"id":"16013146","title":"Javascript Prompt inside loop","body":"\u003cpre\u003e\u003ccode\u003efor (var j=0; j\u0026lt;2; j++){\nlistno=prompt(\"Enter Item Code\",\"0\");\nlistno = parseInt(listno);\n\nif (listno \u0026gt; 0) {\n PRODUCT_WANT.push(PRODUCT_LIST[listno]);\n WANT_PRICE.push(PRICE_LIST[listno]);\n}\n\nelse {\nalert('Invalid Product Code');\n}\nif (quantno \u0026gt; 0) {\n quantno=prompt(\"Enter Quantity\",\"0\");\n quantno = parseInt(quantno);\n 
quantity.push(quantno);\n}\n\nelse {\nalert('Invalid Quantity');\n}\n} \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe loop works but I don't want to have to set the loop count I want to be able to put it to eg 999 then be able to press cancel on the prompt and have the loop finish \u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2013-04-15 10:40:08.113 UTC","last_activity_date":"2013-04-15 12:00:55.987 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2181271","post_type_id":"1","score":"0","tags":"javascript|loops|prompt","view_count":"4163"} @@ -940,7 +940,7 @@ {"id":"25450212","title":"Zend2 class loading in huge application","body":"\u003cp\u003eI started work on a huge project build on Zend2 with Sceleton and Modular structure.\u003c/p\u003e\n\n\u003cp\u003eThe project seems build professionally but with big performance issue (pages load for 15-30 sec).\u003cbr\u003e\nTiming from \u003ccode\u003eindex.php\u003c/code\u003e to Model.Controller.IndexAction is around 2 sec ... \u003c/p\u003e\n\n\u003cp\u003eI put a lot of research but couldn't find problem in the business logic.\u003cbr\u003e\nIt seems like \u003ccode\u003einclude $file\u003c/code\u003e and \u003ccode\u003enew $class\u003c/code\u003e (repeated thousands of times ) create the overall slowness.\u003c/p\u003e\n\n\u003cp\u003eAny suggestion where to look at will be appreciated.\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2014-08-22 15:08:01.91 UTC","favorite_count":"1","last_activity_date":"2014-08-22 15:08:01.91 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1621821","post_type_id":"1","score":"1","tags":"optimization|zend-framework2","view_count":"73"} {"id":"6210309","title":"filenotfound issues in eclipse on Mac OSX","body":"\u003cp\u003eHi \nI have a java class which is working fine in windows but not in Mac OSX snow leopard. I am using Eclipse on both operating systems. 
On Mac OSX its throwing file not found exception. \u003c/p\u003e\n\n\u003cp\u003eBasically I am trying to read a file using BufferedReader and FileReader and I put my file in \\resources\\\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport java.io.BufferedReader;\nimport java.io.FileNotFoundException;\nimport java.io.FileReader;\nimport java.io.IOException;\n\npublic class ReadFileContents {\n\n /**\n * @param args\n */\n public static void main(String[] args) {\n\n BufferedReader br = null;\n String line = \"\";\n try {\n br = new BufferedReader(new FileReader(\"resources\\\\abc\"));\n while((line = br.readLine())!= null)\n {\n System.out.println(\"Read ::: \"+line+\" From File.\");\n }\n } catch (FileNotFoundException fne) {\n fne.printStackTrace();\n }catch (IOException ioe) {\n ioe.printStackTrace();\n }\n\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eOn Mac it is giving\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ejava.io.FileNotFoundException: resources\\abc (No such file or directory)\n at java.io.FileInputStream.open(Native Method)\n at java.io.FileInputStream.\u0026lt;init\u0026gt;(FileInputStream.java:106)\n at java.io.FileInputStream.\u0026lt;init\u0026gt;(FileInputStream.java:66)\n at java.io.FileReader.\u0026lt;init\u0026gt;(FileReader.java:41)\n at ReadFileContents.main(ReadFileContents.java:18)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eDo I need any special configuration in my eclipse to get this working...\u003c/p\u003e","accepted_answer_id":"6210374","answer_count":"1","comment_count":"0","creation_date":"2011-06-02 04:19:28.38 UTC","last_activity_date":"2011-06-02 04:28:52.293 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"373625","post_type_id":"1","score":"0","tags":"eclipse|osx|filenotfoundexception","view_count":"2094"} {"id":"37989676","title":"ERROR: missing FROM-clause entry for table when running function","body":"\u003cp\u003eI want to create a trigger and a function that do the 
following: Each time a new row is inserted in table SalesOrderDetail the function finds the corresponding CustomerID from table SalesOrderHeader and then it adds +1 to the corresponding number_of_sales in the Customer table.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eSalesOrderDetail\n+---------+-------------------------+\n| SalesOrderID | SalesOrderDetailID |\n+---------+-------------------------+\n| value1 | value4 |\n| value1 | value5 |\n| value2 | value6 |\n| value3 | value7 |\n| value3 | value8 |\n| value4 | value9 |\n+---------+-------------------------+\n\n\nSalesOrderHeader\n+---------+-----------------+\n| SalesOrderID | CustomerID |\n+---------+-----------------+\n| value1 | value10 |\n| value2 | value11 |\n| value3 | value12 |\n| value4 | value13 |\n+---------+-----------------+\n\n\n\nCustomer\n+---------+--------------------+\n| CustomerID | Number_of_sales |\n+---------+--------------------+\n| value10 | value14 |\n| value11 | value15 |\n| value12 | value16 |\n| value13 | value17 |\n+---------+--------------------+\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eCode is as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eCREATE OR REPLACE FUNCTION new_order_detail()\nRETURNS trigger AS\n$BODY$\nBEGIN\n DROP TABLE IF EXISTS CustomerInfo;\n CREATE TEMP TABLE CustomerInfo AS\n SELECT* FROM(SELECT CustomerID FROM(\n SELECT * from SalesOrderHeader\n WHERE SalesOrderHeader.SalesOrderID = (SELECT SalesOrderID FROM SalesOrderDetail ORDER BY SalesOrderID DESC limit 1))AS Last_Entry) AS Common_Element;\n\n\n\n\n IF CustomerInfo.CustomerID = Customer.CustomerID THEN\n UPDATE Customer\n SET number_of_items = number_of_items + 1;\n END IF;\n\n\n\nEND;\n$BODY$ LANGUAGE plpgsql;\n\n\nDROP TRIGGER IF EXISTS new_order ON SalesOrderDetail;\n\nCREATE TRIGGER new_order\n AFTER INSERT OR UPDATE ON SalesOrderDetail\n FOR EACH ROW EXECUTE PROCEDURE new_order_detail();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I insert something into the 
SalesOrderDetail table I get the following error:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003ePL/pgSQL function new_order_detail() line 3 at SQL statement ERROR: \n missing FROM-clause entry for table \"customerinfo\" LINE 1: SELECT\n CustomerInfo.CustomerID = Customer.CustomerID\n ^ QUERY: SELECT CustomerInfo.CustomerID = Customer.CustomerID CONTEXT: PL/pgSQL function new_order_detail()\n line 12 at IF\n ********** Error **********\u003c/p\u003e\n \n \u003cp\u003eERROR: missing FROM-clause entry for table \"customerinfo\" SQL state:\n 42P01 Context: PL/pgSQL function new_order_detail() line 12 at IF\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eWhat I am doing wrong? Sorry for the poor explanation English is not my native language.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-06-23 10:59:10.877 UTC","last_activity_date":"2016-06-23 11:15:13.297 UTC","last_edit_date":"2016-06-23 11:07:16.82 UTC","last_editor_display_name":"","last_editor_user_id":"267540","owner_display_name":"","owner_user_id":"6498633","post_type_id":"1","score":"1","tags":"postgresql|plpgsql","view_count":"860"} -{"id":"4996347","title":"How to detect click on an object in OpenGL ES in Android application?","body":"\u003cp\u003eIn my Android application, I have a bunch of meshes in a collection that are moving around in 3D space. I want something to happen to the mesh when any one of the moving mesh (objects) are touched. 
How can I detect which object has been tapped / clicked?\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2011-02-14 19:24:42.547 UTC","favorite_count":"3","last_activity_date":"2016-02-22 22:26:47.253 UTC","last_edit_date":"2016-02-22 22:26:47.253 UTC","last_editor_display_name":"","last_editor_user_id":"4518455","owner_display_name":"","owner_user_id":"574122","post_type_id":"1","score":"6","tags":"android|opengl-es","view_count":"5705"} +{"id":"4996347","title":"How to detect click on an object in OpenGL opensearch in Android application?","body":"\u003cp\u003eIn my Android application, I have a bunch of meshes in a collection that are moving around in 3D space. I want something to happen to the mesh when any one of the moving mesh (objects) are touched. How can I detect which object has been tapped / clicked?\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2011-02-14 19:24:42.547 UTC","favorite_count":"3","last_activity_date":"2016-02-22 22:26:47.253 UTC","last_edit_date":"2016-02-22 22:26:47.253 UTC","last_editor_display_name":"","last_editor_user_id":"4518455","owner_display_name":"","owner_user_id":"574122","post_type_id":"1","score":"6","tags":"android|opengl-opensearch","view_count":"5705"} {"id":"38483967","title":"jQuery replace with variable not working","body":"\u003cp\u003eI am trying to replace string with data.replace, its working fine if using hard code or static value. 
But now i want to replace multiple values with loop but its not working.\u003c/p\u003e\n\n\u003cp\u003eMy Code: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efor(var i = 0; i\u0026lt;words.length; i++){\n var r = words[i];\n data = data.replace(/\\[(\\[qid:{r})\\]]/g, words[i]);\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWords contains: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eArray [ \"hid_1\", \"hid_2\", \"hid_6\", \"hid_7\" ]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand my data is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eSite: [[qid:hid_1]]\u0026lt;br\u0026gt;\n\nBlock: [[qid:hid_2]]\u0026lt;br\u0026gt;\n\nNimewo kay la: [[qid:hid_6]]\u0026lt;br\u0026gt;\n\nLatitude: [[qid:hid_7]]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eits an HTML content.\u003c/p\u003e\n\n\u003cp\u003ei just need variable here:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efor(var i = 0; i\u0026lt;words.length; i++){\n\n var r = words[i];\n data = data.replace(/\\[(\\[qid:hid_1)\\]]/g, 'test');\n //data.replace(/\\[(\\[qid:{r})\\]]/g, 'test');\n\n }\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"38484577","answer_count":"3","comment_count":"9","creation_date":"2016-07-20 14:24:37.95 UTC","last_activity_date":"2016-07-20 17:39:19.397 UTC","last_edit_date":"2016-07-20 16:51:21.6 UTC","last_editor_display_name":"","last_editor_user_id":"3100816","owner_display_name":"","owner_user_id":"3100816","post_type_id":"1","score":"0","tags":"javascript|jquery","view_count":"70"} {"id":"33864088","title":"MySQL case insensitive search in UTF8 processing latin uppercase/lowercase","body":"\u003cp\u003efolks.\nI'm new at MySQL programming and I've tried all things to manage this.\u003c/p\u003e\n\n\u003cp\u003eI would like to do a insensitive search with/without accents, lowercases or uppercases and all return the same results.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eSELECT * FROM table WHERE campo_pesquisado LIKE '%termo_pesquisado%' 
ORDER BY campo_pesquisado ASC\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo, in my DB (MyISAM - collation utf8_general_ci) I have this\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e+---------+--------+\n| campo_pesquisado |\n+---------+--------+\n| São Paulo |\n|  SÃO JOÃO |\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI would like to type termo_pesquisado (keywords) = São Paulo, Sao Paulo, SÃO PAULO or any combination of 'São Paulo' to get the return of \u003cstrong\u003eSão Paulo\u003c/strong\u003e (that in browser shows correctly - São Paulo) from the database.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eTHE PROBLEM\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eIf I type \"são paulo, SãO PAULO or any combination with the \"\u003cstrong\u003eã\u003c/strong\u003e\" lowercase works. It's because the UTF-8 respective code for \u003cstrong\u003eã\u003c/strong\u003e is \u003cstrong\u003eã\u003c/strong\u003e. If I search for SÃO PAULO, the à letter become \u003cstrong\u003eÃ\u003c/strong\u003e the full word will be \u003cstrong\u003eSÃO PAULO\u003c/strong\u003e that is clearly not equal to \u003cstrong\u003eSão Paulo\u003c/strong\u003e.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eTRYING\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eTo solve this I tried this code bellow, but is not working for me.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e SELECT *, CONVERT(CAST( campo_pesquisado AS BINARY) USING utf8) FROM table WHERE CONVERT(CAST( campo_pesquisado AS BINARY) USING utf8) LIKE '%termo_pesquisado%' ORDER BY campo_pesquisado ASC\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eIMPORTANT\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eI can't change my collation. We have to use utf8 as char encode for the tables. 
Its better for multilanguage purposes.\u003c/p\u003e\n\n\u003cp\u003eI'm using PHP (5.5) and the last version of MySQL.\u003c/p\u003e","answer_count":"0","comment_count":"10","creation_date":"2015-11-23 05:11:00.95 UTC","last_activity_date":"2015-11-23 05:11:00.95 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4188476","post_type_id":"1","score":"0","tags":"php|mysql|utf-8|latin1","view_count":"28"} {"id":"3908368","title":"Cannot find any coverage data (ASP.NET MVC2)","body":"\u003cp\u003eI am having some issues with getting Code Coverage working with an out-of-the-box ASP.NET MVC2 Web App\u003c/p\u003e\n\n\u003cp\u003eVS2010 Ultimate, File \u003e New Project \u003e ASP.NET MVC 2 Web Application \u003e Yes, Create unit test project with Visual Studio Unit Test. I then Rebuild All, Run All Unit Tests, Go to Code Coverage and get the following message:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eCannot find any coverage data (.coverage or .coveragexml) files. Check test run details for possible errors.\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eAll the unit tests passed. 
(And I haven't touched a line of code yet)\u003c/p\u003e\n\n\u003cp\u003eI did find the following on the web:\u003cbr\u003e\n\u003ca href=\"http://www.vbforums.com/showthread.php?t=615377\" rel=\"nofollow noreferrer\"\u003ehttp://www.vbforums.com/showthread.php?t=615377\u003c/a\u003e\u003cbr\u003e\nwhich says to do the following:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eTest -\u003e Edit Test Settings -\u003e Local\u003cbr\u003e\n In the test settings dialog, click\n \"Data and Diagnostics\" Ensure \"Code\n Coverage\" are checked, and\n double-click on it Check of the dll's\n you want code coverage enabled for.\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eBut, when I go to Test \u003e Edit Test Seetings, all I see is the grayed out menu item stating \"No Test Settings Available\".\u003c/p\u003e\n\n\u003cp\u003eAny ideas?\u003c/p\u003e\n\n\u003cp\u003eEdit: slowly gaining traction. See: \u003ca href=\"https://stackoverflow.com/questions/1155743/how-to-create-the-vsmdi-testrunconfig-file-when-importing-a-visual-studio-test-pr\"\u003eHow to create the vsmdi/testrunconfig file when importing a Visual Studio test project?\u003c/a\u003e\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2010-10-11 17:06:43.437 UTC","last_activity_date":"2016-12-29 18:44:21.55 UTC","last_edit_date":"2017-05-23 12:24:55.603 UTC","last_editor_display_name":"","last_editor_user_id":"-1","owner_display_name":"","owner_user_id":"156611","post_type_id":"1","score":"1","tags":"visual-studio-2010|unit-testing|asp.net-mvc-2","view_count":"2301"} @@ -949,7 +949,7 @@ {"id":"39977564","title":"How to compare one value in a table with a list of other values in another table","body":"\u003cp\u003eI have two tables:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ea = {customer1:1234, customer2:3456, customer3:4567, customer4:3456}\nb = {2345, 1234, 3456, 6789}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI would like to know if there is a way to compare 
the tables for a match. If no, then that value gets deleted. I am unable to find a way to do one to many compare against the values.\u003c/p\u003e\n\n\u003cp\u003ePlease can you advise on how I can achieve that?\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2016-10-11 12:45:11.18 UTC","last_activity_date":"2016-10-11 21:44:30.013 UTC","last_edit_date":"2016-10-11 21:44:30.013 UTC","last_editor_display_name":"","last_editor_user_id":"2505965","owner_display_name":"","owner_user_id":"1675307","post_type_id":"1","score":"0","tags":"lua|lua-table","view_count":"66"} {"id":"29922734","title":"Searching function using PHP \u0026 mysql","body":"\u003cp\u003eIf I enter one value, the system will function properly. The problem is, when I enter more than one value, the system does not display any output. I need the user can enter the data in bulk and separated by a space. Is there any problem with my code? \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$studid = clean($_POST['studid']);\n$studid2 = $studid; \n$studid3 =explode(' ',$studid2);\nrequire(\"php/conn.php\");\n\n$sql=mysql_query(\"SELECT id,studid,name from student where studid in('\". 
implode(',', $studid3).\"') group by studid,name\");\n\necho\"\n\u0026lt;table\u0026gt;\n\u0026lt;thead\u0026gt;\u0026lt;tr\u0026gt;\u0026lt;th\u0026gt;NAME\u0026lt;/th\u0026gt;\u0026lt;th\u0026gt;IC NUMBER\u0026lt;/th\u0026gt;\u0026lt;th\u0026gt;ADDRESS\u0026lt;/th\u0026gt;\u0026lt;th\u0026gt;PHONE NUMBER\u0026lt;/th\u0026gt;\u0026lt;th\u0026gt;SOURCE\u0026lt;/th\u0026gt;\u0026lt;/tr\u0026gt;\u0026lt;/thead\u0026gt;\u0026lt;tbody\u0026gt;\";\n\nif(mysql_num_rows($q) \u0026gt; 0) \n{\n\n while($row=mysql_fetch_array($q))\n { \n echo\"\n \u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;\".$row['nama'].\"\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\".$row['nokp'].\"\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\".$row['alamat2'].\"\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\".$row['notel2'].\"\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\".$row['info'].\"\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\";\n }\n\n echo \"\u0026lt;/tbody\u0026gt;\u0026lt;/table\u0026gt;\u0026lt;/div\u0026gt;\"; \n} \n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"9","creation_date":"2015-04-28 14:56:10.74 UTC","last_activity_date":"2015-04-28 17:16:00.937 UTC","last_edit_date":"2015-04-28 17:11:23.73 UTC","last_editor_display_name":"","last_editor_user_id":"4842602","owner_display_name":"","owner_user_id":"4842602","post_type_id":"1","score":"-2","tags":"php|mysql","view_count":"48"} {"id":"46349021","title":"How can I control volume on external device (or know it's connected)?","body":"\u003cp\u003eI want to make an app to control volume on an external device (like Spotify Connect) as well as apps on my phone itself. I can control the volume using KEYCODE_VOLUME_UP, but the lag is terrible. If I use AudioManger, it's perfect but it doesn't work for the external device. I want to use AudioManager for the phone apps and KeyEvents for the external device. However, I'm not sure how to know when to use which. 
How can I tell if the phone is currently controlling an external device or not?\u003c/p\u003e\n\n\u003cp\u003eRight now I'm using \u003ccode\u003eAudioManager.getDevices()\u003c/code\u003e to find out what devices are connected, but there are several problems with this:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003eIt only works on api 23+\u003c/li\u003e\n\u003cli\u003eThe only way to see if it is a phone is to compare against Build.MODEL which could (potentially) not work if there's variation\u003c/li\u003e\n\u003cli\u003eIt doesn't identify cases where you're connected to an external device but are controlling an app on the phone (like youtube) rather than something that's playing from that external device\u003c/li\u003e\n\u003c/ol\u003e","answer_count":"0","comment_count":"0","creation_date":"2017-09-21 16:35:43.237 UTC","last_activity_date":"2017-09-21 17:04:31.603 UTC","last_edit_date":"2017-09-21 17:04:31.603 UTC","last_editor_display_name":"","last_editor_user_id":"5026136","owner_display_name":"","owner_user_id":"5026136","post_type_id":"1","score":"0","tags":"android|audio|external|keyevent|android-audiomanager","view_count":"9"} -{"id":"29306346","title":"Best place for UI-related initialization code UITableViewCell with Storyboard","body":"\u003cp\u003eWhat is the best place for UI-related code that should be run on initialization for a \u003ccode\u003eUITableViewCell\u003c/code\u003e subclass? i.e. 
\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eself.someLabel.backgroundColor = [UIColor DISBadgeRed];\nself.anotherLabel.layer.cornerRadius = self.unseenMatchesLabel.frameHeight / 2;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003ebackground\u003c/strong\u003e\nIm using a storyboard so the designated initializer \u003ccode\u003e- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier\u003c/code\u003e isn't called and in \u003ccode\u003einitWithCoder:\u003c/code\u003e which es called, UI isn't ready for these calls.\u003c/p\u003e\n\n\u003cp\u003eI could call this code from a method that is called within \u003ccode\u003ecellForRow...\u003c/code\u003e but then it would end up being called every time.\u003c/p\u003e","accepted_answer_id":"29306494","answer_count":"1","comment_count":"2","creation_date":"2015-03-27 17:11:02.767 UTC","last_activity_date":"2015-03-27 17:18:11.467 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"922571","post_type_id":"1","score":"1","tags":"ios|objective-c|uitableview","view_count":"107"} +{"id":"29306346","title":"Best place for UI-related initialization code UITableViewCell with Storyboard","body":"\u003cp\u003eWhat is the best place for UI-related code that should be run on initialization for a \u003ccode\u003eUITableViewCell\u003c/code\u003e subclass? i.e. 
\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eself.someLabel.backgroundColor = [UIColor DISBadgeRed];\nself.anotherLabel.layer.cornerRadius = self.unseenMatchesLabel.frameHeight / 2;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003ebackground\u003c/strong\u003e\nIm using a storyboard so the designated initializer \u003ccode\u003e- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier\u003c/code\u003e isn't called and in \u003ccode\u003einitWithCoder:\u003c/code\u003e which es called, UI isn't ready for these calls.\u003c/p\u003e\n\n\u003cp\u003eI could call this code from a method that is called within \u003ccode\u003ecellForRow...\u003c/code\u003e but then it would end up being called every time.\u003c/p\u003e","accepted_answer_id":"29306494","answer_count":"1","comment_count":"2","creation_date":"2015-03-27 17:11:02.767 UTC","last_activity_date":"2015-03-27 17:18:11.467 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"922571","post_type_id":"1","score":"1","tags":"ios|objective-c|uitableview","view_count":"107"} {"id":"5065727","title":"Reverse engineering tools for Java web applications","body":"\u003cp\u003eI have been searching in the Internet looking for an application which could take a Netbeans Web project and create an UML diagram resulting from these classes. Also, but not essential, I would like a similar tool for the JavaScript code I have in the Java web project I mentioned before. 
It's such an inferno trying to understand the structure and inner relations of this web project I was given.\u003c/p\u003e","answer_count":"7","comment_count":"0","creation_date":"2011-02-21 12:08:01.257 UTC","last_activity_date":"2014-09-10 13:12:31.52 UTC","last_edit_date":"2014-09-10 13:12:31.52 UTC","last_editor_display_name":"","last_editor_user_id":"467874","owner_display_name":"","owner_user_id":"626527","post_type_id":"1","score":"5","tags":"java|javascript|netbeans|uml|reverse-engineering","view_count":"4285"} {"id":"23872314","title":"Defining Data Model in BusinessObjects 4.0","body":"\u003cp\u003e(I am working with BusinessObjects Information Design Tool, version 4.0)\u003c/p\u003e\n\n\u003cp\u003eI have two fact tables - FACT_MAN and FACT_TOTAL. They are defined as follows:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cstrong\u003eFACT_Man\u003c/strong\u003e:\u003c/li\u003e\n\u003cli\u003e...\u003c/li\u003e\n\u003cli\u003eMP_Key\u003c/li\u003e\n\u003cli\u003ePC_Key\u003c/li\u003e\n\u003cli\u003e\u003cp\u003e...\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003e\u003cstrong\u003eFACT_TOTAL\u003c/strong\u003e:\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e...\u003c/li\u003e\n\u003cli\u003eMP_Key\u003c/li\u003e\n\u003cli\u003ePC_Key\u003c/li\u003e\n\u003cli\u003e...\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eThere is also \u003cstrong\u003eFLAT\u003c/strong\u003e dimension in the database, defined as follows:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cstrong\u003eFLAT\u003c/strong\u003e\u003c/li\u003e\n\u003cli\u003e...\u003c/li\u003e\n\u003cli\u003eLeaf\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eBoth attributes \u003cstrong\u003eMP_Key\u003c/strong\u003e and \u003cstrong\u003ePC_Key\u003c/strong\u003e from both fact tables are connected to \u003cstrong\u003eLeaf\u003c/strong\u003e key (table \u003cstrong\u003eFLAT\u003c/strong\u003e) as 
follows:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cstrong\u003eMP_Key\u003c/strong\u003e n:1 \u003cstrong\u003eLeaf\u003c/strong\u003e\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003ePC_Key\u003c/strong\u003e n:1 \u003cstrong\u003eLeaf\u003c/strong\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eThe question is: \u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eshould I model the universe so that I connect BOTH fact tables only to ONE FLAT dimension, \u003cstrong\u003eOR\u003c/strong\u003e \u003c/li\u003e\n\u003cli\u003eshould I use aliases, \u003cstrong\u003eOR\u003c/strong\u003e\u003c/li\u003e\n\u003cli\u003emaybe solve the problem with contexts, \u003cstrong\u003eOR\u003c/strong\u003e\u003c/li\u003e\n\u003cli\u003emy initial intention was to create only ONE universe with mentioned tables (there are additionaly 3 more FACT tables in the data warehouse). Would that be good approach, or should I maybe create MULTIPLE universes? If yes, what would be a valid reason for that? ... \u003cstrong\u003eOR\u003c/strong\u003e\u003c/li\u003e\n\u003cli\u003eis there some other better approach? \u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eSince I don't know which approach to take, could you please elaborate on your answer. Thanks.\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2014-05-26 14:24:19.15 UTC","last_activity_date":"2014-05-28 19:15:05.667 UTC","last_edit_date":"2014-05-26 14:37:40.763 UTC","last_editor_display_name":"","last_editor_user_id":"1956088","owner_display_name":"","owner_user_id":"1956088","post_type_id":"1","score":"0","tags":"business-objects","view_count":"63"} {"id":"28739189","title":"loading data in datatable on onchange event","body":"\u003cp\u003eI want to implement function in which data will be loaded into datatable after onChange event. 
So for that I am trying to implement code as below.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar viewdatatab = $('#dataTablesFeedback').dataTable({ \n\n \"columns\": [\n\n { \"data\": \"resourceId\" },\n { \"data\": \"feedbackRecommendation\" },\n { \"data\": \"technicalSkillGaps\" },\n { \"data\": \"technicalAvgSkills\" },\n { \"data\": \"feedbackType\" },\n { \"data\": \"feedbackId\" },\n { \"data\": \"isNew\" },\n\n ]\n }); \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhich is creating my datatable layout and I am calling below function on dropdown change event is :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunction loadFeedback(){\n\nviewdatatabJS = $('#dataTablesFeedback').dataTable({ \n \"processing\" : true,\n \"retrieve\" : true,\n \"ajax\" : \"/nhp/rest/feedback/viewFeedback\",\n \"fnServerParams\": function ( aoData ) {\n aoData.push( { \"name\": \"userName\", \"value\":employeeId } ,\n { \"name\": \"resourceId\", \"value\":mentorDataJson[$('#dropDownId').val()].resourceId });\n }, \n});\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhere I am passing some parameter in aoData.push but my URL is not getting called.\u003c/p\u003e","accepted_answer_id":"28827132","answer_count":"1","comment_count":"2","creation_date":"2015-02-26 09:54:12.393 UTC","last_activity_date":"2016-11-03 07:17:03.323 UTC","last_edit_date":"2015-02-28 02:34:43.987 UTC","last_editor_display_name":"","last_editor_user_id":"1407478","owner_display_name":"","owner_user_id":"2852218","post_type_id":"1","score":"0","tags":"javascript|jquery|twitter-bootstrap|jquery-datatables","view_count":"1612"} @@ -1041,7 +1041,7 @@ {"id":"41017590","title":"tkinter frame display basics","body":"\u003cp\u003eI am trying to create an application (well my very first) in Python/tkinter which ultimatively should run on a Raspberry Pi with a a small touchscreen display (480x320px) attached to it. 
The screen is divided into a mainframe and a frame which (later on) will contain 6 function keys (buttons)\u003c/p\u003e\n\n\u003cp\u003eSo I started with below code, hoping/expecting that somehow I will get the main application window divided into two frames, one being grey, one being black ... but all I see (on my PC) is the MainApp window in correct size, not resizeable, with correct title (so far so good!) and a yellow background ... it seems the frames I defined inside MainApp are not displayed, even thoough there are Labels inside, they are sticky, they have a weight (and I can't remember what else I tried and where else I searched)\u003c/p\u003e\n\n\u003cp\u003eWhat am I overlooking here please?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e#!/usr/bin/python3\nimport tkinter as tk\n#\n# main application\n#\nclass MainApp(tk.Frame):\n def __init__(self, parent):\n tk.Frame.__init__(self, parent)\n self.parent = parent\n self.parent.title(\"My first GUI\")\n self.parent.geometry(\"480x320\")\n self.parent.resizable(width=False, height=False)\n self.parent.config(bg=\"yellow\")\n\n mainframe = tk.Frame(self, bg=\"grey\", width=480, height=280 )\n mainframe.grid(column=0, row=0, sticky=\"WENS\")\n tk.Label(mainframe, text=\"co-cooo\").grid(column=0, row=0, sticky=\"WENS\")\n\n fkeyframe = tk.Frame(self, bg=\"black\", width=480, height=40)\n fkeyframe.grid(column=0, row=1, sticky=\"WENS\")\n tk.Label(fkeyframe, text=\"fo-fooo\").grid(column=0, row=0, sticky=\"WENS\")\n\n self.rowconfigure(0, weight=1, minsize=280)\n self.rowconfigure(1, weight=1, minsize=40)\n#\n# define root element and start application\n#\ndef main():\n root = tk.Tk()\n app = MainApp(root)\n root.mainloop() \n\n#\n# start if called from command line\n#\nif __name__ == '__main__':\n main()\n\n# 0,0 MainApp (yellow) 480,0\n# +---------------------------------+\n# | mainframe (grey, h=280) |\n# |+-------------------------------+|\n# || ||\n# || ||\n# || ||\n# 
|+-------------------------------+|\n# | fkeyframe (black, h=40) |\n# |+-------------------------------+|\n# ||+----+----+----+----+----+----+||\n# ||| Bt | Bt | Bt | Bt | Bt | Bt |||\n# ||+----+----+----+----+----+----+||\n# |+-------------------------------+|\n# +---------------------------------+\n# 320,0 320,480\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"41017682","answer_count":"2","comment_count":"0","creation_date":"2016-12-07 12:32:43.75 UTC","favorite_count":"1","last_activity_date":"2016-12-07 13:39:48.903 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"185547","post_type_id":"1","score":"1","tags":"python|tkinter","view_count":"183"} {"id":"38089178","title":"Is it possible to attach to a remote gdb target with vscode?","body":"\u003cp\u003eI'm trying to setup the configuration to attach to a remote C/C++ gdb target running gdbserver with visual studio code. Is this currently supported? If so, how do I get around these limitations:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003cp\u003eThe address and port options indicate that they aren't supported for C/C++.\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eI can force code to use the special remote enabled version of gdb, but its trying to run the target application locally and not connecting to the target gdbserver platform.\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eWill PowerPC remote targets be supported assuming I can solve #1 and #2?\u003c/p\u003e\u003c/li\u003e\n\u003c/ol\u003e","answer_count":"0","comment_count":"0","creation_date":"2016-06-29 02:08:04.6 UTC","last_activity_date":"2016-06-29 02:08:04.6 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"6526126","post_type_id":"1","score":"2","tags":"gdb|visual-studio-code","view_count":"366"} {"id":"15601598","title":"how to insert and replace a list of words in another list or a string in python","body":"\u003cp\u003eI am trying to replace a string 
with the word \u003ccode\u003e[NOUN]\u003c/code\u003e on it. I'm clueless!\u003c/p\u003e\n\n\u003cp\u003eHere's my code below - which returns lots of errors - the variable story is a string and listOfNouns is a list - so I try and convert the string into a list by splitting it.:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edef replacement(story, listOfNouns): \n length = len(story1)\n story1 = story.split()\n for c in range(0,len(story1)):\n if c in listOfNouns:\n story1[c]= 'NOUN'\n story = ''.join(story) \n return story\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere's the error message that I get below when I call the above function with\u003cbr\u003e\n\u003ccode\u003ereplacement(\"Let's play marbles\", ['marbles'])\u003c/code\u003e:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTraceback (most recent call last):\n File \"\u0026lt;pyshell#189\u0026gt;\", line 1, in \u0026lt;module\u0026gt;\n replacement(\"Let's play marbels\", ['marbels'])\n File \"C:/ProblemSet4/exam.py\", line 3, in replacement\n length = len(story1)\nUnboundLocalError: local variable 'story1' referenced before assignment\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow can I replace the new story1 list with another element from another list?\u003c/p\u003e\n\n\u003cp\u003eHow do I modify the tuples and return the new string - which is supposed to say:\u003cbr\u003e\n\u003ccode\u003eLet's play [NOUN]\u003c/code\u003e???\u003c/p\u003e\n\n\u003cp\u003eCan anyone please help out? 
I'm lost and i've been trying this for hours using all the knowledge I have in Python/Java to figure this crap out!\u003c/p\u003e","answer_count":"3","comment_count":"0","creation_date":"2013-03-24 17:23:27.777 UTC","last_activity_date":"2013-03-24 21:05:26.493 UTC","last_edit_date":"2013-03-24 17:43:45.747 UTC","last_editor_display_name":"","last_editor_user_id":"235698","owner_display_name":"","owner_user_id":"2150603","post_type_id":"1","score":"0","tags":"python|string|list|split|replace","view_count":"487"} -{"id":"28932078","title":"Ruby Sinatra app not storing session variable correctly","body":"\u003cp\u003eI am following a guide on learning Ruby and Sinatra which had me write the following code for a card game (based off of \u003ca href=\"https://www.youtube.com/watch?v=9V6mCxl7Up4\" rel=\"nofollow\"\u003ePlay Your Cards Right\u003c/a\u003e).\u003c/p\u003e\n\n\u003cp\u003eHere is the code\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eenable :sessions\n\nget '/' do\n session[:deck] = []\n suits = %w[ Hearts Diamonds Clubs Spades ]\n values = %w[ Ace 2 3 4 5 6 7 8 9 10 Jack Queen King ]\n suits.each do |suit|\n values.each do |value|\n session[:deck] \u0026lt;\u0026lt; \"#{value} of #{suit}\"\n end\n end\n session[:deck].shuffle!\n session[:guesses] = -1\n redirect to('/play')\nend\n\nget '/:guess' do\n\n card = session[:deck].pop\n value = case card[0]\n when \"J\" then 11\n when \"Q\" then 12\n when \"K\" then 13\n else card.to_i\n end\n\n if (params[:guess] == 'higher' and value \u0026lt; session[:value]) or (params[:guess] == 'lower' and value \u0026gt; session[:value])\n \"Game Over! The card was the #{card} (value #{value}), and you had #{session[:value]}. You managed to make #{session[:guesses]} correct guess#{'es' unless session[:guesses] == 1}. \u0026lt;a href='/'\u0026gt;Play Again\u0026lt;/a\u0026gt;\"\n else\n session[:guesses] += 1\n session[:value] = value\n \"The card is the #{card} (value of #{value}). 
Do you think the next card will be \u0026lt;a href='/higher'\u0026gt;Higher\u0026lt;/a\u0026gt; or \u0026lt;a href='/lower'\u0026gt;Lower\u0026lt;/a\u0026gt;?\"\n end\nend\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe game seems to play ok some of the time, however, for some reason, the session variable \u003ccode\u003evalue\u003c/code\u003e seems to be overwritten with a new value before the IF statement executes.\u003c/p\u003e\n\n\u003cp\u003eI have no clue what could be wrong with the above code and why the \u003ccode\u003evalue\u003c/code\u003e session variable is changing to a new value randomly when it shouldn't.\u003c/p\u003e\n\n\u003cp\u003ePlease take a look at my code (or try to run it yourself) and tell me what could be wrong.\u003c/p\u003e\n\n\u003cp\u003eMy \u003ccode\u003econfig.ru\u003c/code\u003e file:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erequire 'rubygems'\nrequire 'sinatra'\n\nset :environment, ENV['RACK_ENV'].to_sym\ndisable :run, :reload\n\nrequire './play_your_cards_right.rb'\n\nrun Sinatra::Application\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"7","creation_date":"2015-03-08 21:25:25.833 UTC","last_activity_date":"2015-03-10 04:05:02.093 UTC","last_edit_date":"2015-03-10 04:05:02.093 UTC","last_editor_display_name":"","last_editor_user_id":"2047843","owner_display_name":"","owner_user_id":"2047843","post_type_id":"1","score":"0","tags":"ruby|sinatra","view_count":"55"} +{"id":"28932078","title":"Ruby Sinatra app not storing session variable correctly","body":"\u003cp\u003eI am following a guide on learning Ruby and Sinatra which had me write the following code for a card game (based off of \u003ca href=\"https://www.youtube.com/watch?v=9V6mCxl7Up4\" rel=\"nofollow\"\u003ePlay Your Cards Right\u003c/a\u003e).\u003c/p\u003e\n\n\u003cp\u003eHere is the code\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eenable :sessions\n\nget '/' do\n session[:deck] = []\n suits = %w[ Hearts Diamonds Clubs Spades 
]\n values = %w[ Ace 2 3 4 5 6 7 8 9 10 Jack Queen King ]\n suits.each do |suit|\n values.each do |value|\n session[:deck] \u0026lt;\u0026lt; \"#{value} of #{suit}\"\n end\n end\n session[:deck].shuffle!\n session[:guesses] = -1\n redirect to('/play')\nend\n\nget '/:guess' do\n\n card = session[:deck].pop\n value = case card[0]\n when \"J\" then 11\n when \"Q\" then 12\n when \"K\" then 13\n else card.to_i\n end\n\n if (params[:guess] == 'higher' and value \u0026lt; session[:value]) or (params[:guess] == 'lower' and value \u0026gt; session[:value])\n \"Game Over! The card was the #{card} (value #{value}), and you had #{session[:value]}. You managed to make #{session[:guesses]} correct guess#{'es' unless session[:guesses] == 1}. \u0026lt;a href='/'\u0026gt;Play Again\u0026lt;/a\u0026gt;\"\n else\n session[:guesses] += 1\n session[:value] = value\n \"The card is the #{card} (value of #{value}). Do you think the next card will be \u0026lt;a href='/higher'\u0026gt;Higher\u0026lt;/a\u0026gt; or \u0026lt;a href='/lower'\u0026gt;Lower\u0026lt;/a\u0026gt;?\"\n end\nend\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe game seems to play ok some of the time, however, for some reason, the session variable \u003ccode\u003evalue\u003c/code\u003e seems to be overwritten with a new value before the IF statement executes.\u003c/p\u003e\n\n\u003cp\u003eI have no clue what could be wrong with the above code and why the \u003ccode\u003evalue\u003c/code\u003e session variable is changing to a new value randomly when it shouldn't.\u003c/p\u003e\n\n\u003cp\u003ePlease take a look at my code (or try to run it yourself) and tell me what could be wrong.\u003c/p\u003e\n\n\u003cp\u003eMy \u003ccode\u003econfig.ru\u003c/code\u003e file:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erequire 'rubygems'\nrequire 'sinatra'\n\nset :environment, ENV['RACK_ENV'].to_sym\ndisable :run, :reload\n\nrequire './play_your_cards_right.rb'\n\nrun 
Sinatra::Application\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"7","creation_date":"2015-03-08 21:25:25.833 UTC","last_activity_date":"2015-03-10 04:05:02.093 UTC","last_edit_date":"2015-03-10 04:05:02.093 UTC","last_editor_display_name":"","last_editor_user_id":"2047843","owner_display_name":"","owner_user_id":"2047843","post_type_id":"1","score":"0","tags":"ruby|sinatra","view_count":"55"} {"id":"1135734","title":"It's possible to share a cookie between 'some' subdomains?","body":"\u003cp\u003eI've been reading some posts about web performance, one of the points is to\u003cbr\u003e\nserve static content from a cookie-free domain, my question is:\u003c/p\u003e\n\n\u003cp\u003eCan I share cookies between, let's say example.com and www.example.com, while excluding static1.example.com, static2.example.com, etc?\u003c/p\u003e\n\n\u003cp\u003eOr do I need to set a different top level domain?\u003c/p\u003e\n\n\u003cp\u003eI know (or I think) that I could set the domain of the cookie to '.example.com', but\u003cbr\u003e\ncorrect me if I'm wrong this shares the cookies across \u003cem\u003eall\u003c/em\u003e sub-domains.\u003c/p\u003e","accepted_answer_id":"1135777","answer_count":"3","comment_count":"0","creation_date":"2009-07-16 06:23:51.95 UTC","favorite_count":"6","last_activity_date":"2011-02-23 23:33:38.17 UTC","last_edit_date":"2009-07-16 06:57:45.563 UTC","last_editor_display_name":"","last_editor_user_id":"28169","owner_display_name":"","owner_user_id":"61327","post_type_id":"1","score":"22","tags":"http|cookies","view_count":"14579"} {"id":"34306941","title":"Set font face and size in Scala TextArea","body":"\u003cp\u003eAn old thread (2009) mentioned the following:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eval area = new TextArea {\n font = new Font(\"Arial\", 0, 8)\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever that code has no effect on current version of \u003ccode\u003escala.swing\u003c/code\u003e . 
I also tried\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003earea.peer.setFont(new Font(\"Arial\", 0,8).\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThat also had no effect. So what is the correct way?\u003c/p\u003e","accepted_answer_id":"34307048","answer_count":"1","comment_count":"0","creation_date":"2015-12-16 08:17:18.75 UTC","last_activity_date":"2015-12-16 08:24:03.85 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1056563","post_type_id":"1","score":"1","tags":"swing|scala","view_count":"157"} {"id":"12377119","title":"android tcp client file receive","body":"\u003cp\u003eI am trying to send a file (png to be specific) over sockets from python server to android client. I know that my python server is sending the data, I just can't figure out how to receive the data on the android side. Here is what the code looks like to receive the file.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e String path = Environment.getExternalStorageDirectory().toString() +\"/tmp/test.png\"; \n try {\n socket = new Socket(\"192.168.1.129\", 29877);\n\n is = socket.getInputStream();\n out = new FileOutputStream(path);\n byte[] temp = new byte[1024];\n for(int c = is.read(temp,0,1024); c \u0026gt; 0; c = is.read(temp,0,1024)){\n out.write(temp,0,c);\n Log.d(\"debug tag\", out.toString());\n }\n Log.d(\"debug tag\", temp.toString());\n\n Bitmap myBitmap = BitmapFactory.decodeByteArray(temp, 0, temp.length);\n imageView.setImageBitmap(myBitmap);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThanks for any advice.\u003c/p\u003e","accepted_answer_id":"12377248","answer_count":"1","comment_count":"0","creation_date":"2012-09-11 20:07:44.913 UTC","favorite_count":"1","last_activity_date":"2012-09-11 20:16:16.523 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1661396","post_type_id":"1","score":"1","tags":"java|android|sockets","view_count":"715"} @@ -1228,7 +1228,7 @@ {"id":"42736290","title":"Choose type of ggplot2 
histogram (frequency or density) within a function","body":"\u003cp\u003elong-time reader, first-time question-writer...I have a function that puts takes in data and spits out a ggplot2 histogram with some specific formatting. I'm trying to edit this function so that one of the function parameters can specify whether I want the histogram to show the frequency or the density of the data. I know I can specify this manually inside the \u003ccode\u003egeom_histogram()\u003c/code\u003e function with \u003ccode\u003eaes(y=..count..)\u003c/code\u003e or \u003ccode\u003eaes(y=..density..)\u003c/code\u003e, respectively. But I'm having issues figuring out how to access these variables if they aren't inputted directly.\u003c/p\u003e\n\n\u003cp\u003eHere is a simplified version of what I'm trying to do:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elibrary(ggplot2)\n\nhistplot \u0026lt;- function(data,density=FALSE) {\n\n if (density) {\n type \u0026lt;- \"..density..\"\n } else {\n type \u0026lt;- \"..count..\"\n }\n\n theplot \u0026lt;- ggplot(data, aes(x=data[,1])) +\n geom_histogram(position=\"identity\",binwidth = 2, \n aes(y=eval(parse(text=type))))\n\n g \u0026lt;- ggplot_gtable(ggplot_build(theplot))\n grid.draw(g)\n\n}\n\nxy \u0026lt;- data.frame(X=rnorm(100,0,10),Y=rnorm(100))\nhistplot(xy)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I execute this function, the error I get is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eError in eval(expr, envir, enclos) : object '..count..' 
not found\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI can't figure out why this won't work, because if I do something like the following:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ex \u0026lt;- 1:5\ny \u0026lt;- \"x\"\neval(parse(text=y))\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThen the output will be \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[1] 1 2 3 4 5\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMy guess is it has something to do with the environments.\u003c/p\u003e","accepted_answer_id":"42736444","answer_count":"1","comment_count":"1","creation_date":"2017-03-11 14:17:29.33 UTC","last_activity_date":"2017-03-11 14:35:39.15 UTC","last_edit_date":"2017-03-11 14:23:48.643 UTC","last_editor_display_name":"","last_editor_user_id":"7694963","owner_display_name":"","owner_user_id":"7694963","post_type_id":"1","score":"2","tags":"r|ggplot2|histogram","view_count":"120"} {"id":"918619","title":"Redirect batch stderr to file","body":"\u003cp\u003eI have a batch file that executes a java application. I'm trying to modify it so that whenever an exception occurs, it'll write the STDERR out to a file.\u003c/p\u003e\n\n\u003cp\u003eIt looks something like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003estart java something.jar method %1 %2 2\u0026gt;\u0026gt; log.txt\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way I can write the arguments %1 and %2 to the log.txt file as well? I don't want to write it to the log file everytime this batch file gets called, only when an exception occurs.\u003c/p\u003e\n\n\u003cp\u003eI tried searching for a way to redirect STDERR into a variable, but I couldn't figure it out. 
Ideally I'd like the log file to look something like:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eBatch file called with parameters:\n- \"first arg\"\n- \"second arg\"\nException: \njava.io.exception etc...\n\n------------------------------------\n\nBatch file called with parameters:\n- \"first arg\"\n- \"second arg\"\nException: \njava.io.exception etc...\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"6","comment_count":"0","creation_date":"2009-05-28 00:24:04.513 UTC","last_activity_date":"2012-08-22 19:59:51.813 UTC","last_edit_date":"2012-08-22 19:59:51.813 UTC","last_editor_display_name":"","last_editor_user_id":"77782","owner_display_name":"","owner_user_id":"103791","post_type_id":"1","score":"10","tags":"windows|batch-file|cmd|stderr","view_count":"18984"} {"id":"3961278","title":"Word wrap a string in multiple lines","body":"\u003cp\u003eI am trying to word wrap a string into multiple lines. Every line will have defined width.\u003c/p\u003e\n\n\u003cp\u003eFor example I would get this result if I word wrap it to an area of 120 pixels in width.\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eLorem ipsum dolor sit amet,\u003cbr\u003e\n consectetur adipiscing elit. Sed augue\u003cbr\u003e\n velit, tempor non vulputate sit amet,\u003cbr\u003e\n dictum vitae lacus. In vitae ante\u003cbr\u003e\n justo, ut accumsan sem. Donec\u003cbr\u003e\n pulvinar, nisi nec sagittis consequat,\u003cbr\u003e\n sem orci luctus velit, sed elementum\u003cbr\u003e\n ligula ante nec neque. Pellentesque\u003cbr\u003e\n habitant morbi tristique senectus et\u003cbr\u003e\n netus et malesuada fames ac turpis\u003cbr\u003e\n egestas. Etiam erat est, pellentesque\u003cbr\u003e\n eget tincidunt ut, egestas in ante.\u003cbr\u003e\n Nulla vitae vulputate velit. Proin in\u003cbr\u003e\n congue neque. Cras rutrum sodales\u003cbr\u003e\n sapien, ut convallis erat auctor vel.\u003cbr\u003e\n Duis ultricies pharetra dui, sagittis\u003cbr\u003e\n varius mauris tristique a. 
Nam ut\u003cbr\u003e\n neque id risus tempor hendrerit.\u003cbr\u003e\n Maecenas ut lacus nunc. Nulla\u003cbr\u003e\n fermentum ornare rhoncus. Nulla\u003cbr\u003e\n gravida vestibulum odio, vel commodo\u003cbr\u003e\n magna condimentum quis. Quisque\u003cbr\u003e\n sollicitudin blandit mi, non varius\u003cbr\u003e\n libero lobortis eu. Vestibulum eu\u003cbr\u003e\n turpis massa, id tincidunt orci.\u003cbr\u003e\n Curabitur pellentesque urna non risus\u003cbr\u003e\n adipiscing facilisis. Mauris vel\u003cbr\u003e\n accumsan purus. Proin quis enim nec\u003cbr\u003e\n sem tempor vestibulum ac vitae augue. \u003c/p\u003e\n\u003c/blockquote\u003e","accepted_answer_id":"3961597","answer_count":"8","comment_count":"3","creation_date":"2010-10-18 16:43:18.39 UTC","favorite_count":"5","last_activity_date":"2017-05-22 11:34:48.113 UTC","last_edit_date":"2013-11-20 21:29:47.823 UTC","last_editor_display_name":"","last_editor_user_id":"1043380","owner_display_name":"","owner_user_id":"479531","post_type_id":"1","score":"30","tags":"c#|word-wrap|textwrapping","view_count":"41175"} -{"id":"23433921","title":"Submit form when press enter, not press button","body":"\u003cp\u003eI have this code\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;form name=\"frmDiagnosticos\" id=\"frmDiagnosticos\" method=\"post\" action=\"agregarDiagnosticoSesion.jsp\"\u0026gt;\n \u0026lt;label for=\"txtCodigoCIE10\" class=\"clase\"\u0026gt;Código CIE-10\u0026lt;/label\u0026gt;\u0026lt;input class=\"codigoCIE10\" type=\"text\" name=\"txtCodigoCIE10\" id=\"txtCodigoCIE10\" /\u0026gt;\n \u0026lt;label for=\"linkAbrirBuscador\" class=\"clase\"\u0026gt;Buscar código CIE-10\u0026lt;/label\u0026gt;\u0026lt;button id=\"linkAbrirBuscador\"\u0026gt;Buscar diagnósticos\u0026lt;/button\u0026gt;\n \u0026lt;label for=\"chkCronica\" class=\"clase\"\u0026gt;Enfermedad crónica\u0026lt;/label\u0026gt;\u0026lt;input type=\"checkbox\" name=\"chkCronica\" id=\"chkCronica\" /\u0026gt;\n \u0026lt;label 
for=\"chkHabito\" class=\"clase\"\u0026gt;Es un hábito\u0026lt;/label\u0026gt;\u0026lt;input type=\"checkbox\" name=\"chkHabito\" id=\"chkHabito\" /\u0026gt;\n \u0026lt;input type=\"submit\" name=\"btnConfirmarYContinuar\" value=\"Confirmar\" id=\"btnConfirmarYContinuar\" /\u0026gt;\n\u0026lt;/form\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I am focused on the first text input (ID: \u003ccode\u003etxtCodigoCIE10\u003c/code\u003e) and I press \u003ckbd\u003eEnter\u003c/kbd\u003e, I want to trigger the event of the submit, not the button (ID: \u003ccode\u003elinkAbrirBuscador\u003c/code\u003e). How can I do?\u003c/p\u003e\n\n\u003cp\u003eI am using jQuery 1.11.\u003c/p\u003e","accepted_answer_id":"23433976","answer_count":"1","comment_count":"2","creation_date":"2014-05-02 17:40:23.11 UTC","last_activity_date":"2015-03-17 05:07:57.74 UTC","last_edit_date":"2015-03-17 05:07:57.74 UTC","last_editor_display_name":"","last_editor_user_id":"2753241","owner_display_name":"","owner_user_id":"2836717","post_type_id":"1","score":"0","tags":"javascript|jquery|html","view_count":"154"} +{"id":"23433921","title":"Submit form when press enter, not press button","body":"\u003cp\u003eI have this code\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;form name=\"frmDiagnosticos\" id=\"frmDiagnosticos\" method=\"post\" action=\"agregarDiagnosticoSesion.jsp\"\u0026gt;\n \u0026lt;label for=\"txtCodigoCIE10\" class=\"clase\"\u0026gt;Código CIE-10\u0026lt;/label\u0026gt;\u0026lt;input class=\"codigoCIE10\" type=\"text\" name=\"txtCodigoCIE10\" id=\"txtCodigoCIE10\" /\u0026gt;\n \u0026lt;label for=\"linkAbrirBuscador\" class=\"clase\"\u0026gt;Buscar código CIE-10\u0026lt;/label\u0026gt;\u0026lt;button id=\"linkAbrirBuscador\"\u0026gt;Buscar diagnósticos\u0026lt;/button\u0026gt;\n \u0026lt;label for=\"chkCronica\" class=\"clase\"\u0026gt;Enfermedad crónica\u0026lt;/label\u0026gt;\u0026lt;input type=\"checkbox\" name=\"chkCronica\" id=\"chkCronica\" /\u0026gt;\n 
\u0026lt;label for=\"chkHabito\" class=\"clase\"\u0026gt;Es un hábito\u0026lt;/label\u0026gt;\u0026lt;input type=\"checkbox\" name=\"chkHabito\" id=\"chkHabito\" /\u0026gt;\n \u0026lt;input type=\"submit\" name=\"btnConfirmarYContinuar\" value=\"Confirmar\" id=\"btnConfirmarYContinuar\" /\u0026gt;\n\u0026lt;/form\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I am focused on the first text input (ID: \u003ccode\u003etxtCodigoCIE10\u003c/code\u003e) and I press \u003ckbd\u003eEnter\u003c/kbd\u003e, I want to trigger the event of the submit, not the button (ID: \u003ccode\u003elinkAbrirBuscador\u003c/code\u003e). How can I do?\u003c/p\u003e\n\n\u003cp\u003eI am using jQuery 1.11.\u003c/p\u003e","accepted_answer_id":"23433976","answer_count":"1","comment_count":"2","creation_date":"2014-05-02 17:40:23.11 UTC","last_activity_date":"2015-03-17 05:07:57.74 UTC","last_edit_date":"2015-03-17 05:07:57.74 UTC","last_editor_display_name":"","last_editor_user_id":"2753241","owner_display_name":"","owner_user_id":"2836717","post_type_id":"1","score":"0","tags":"javascript|jquery|html","view_count":"154"}
I already expected it.\u003c/p\u003e\n\n\u003cp\u003eBut when running both together, I was expecting that the \u003cem\u003eb\u003c/em\u003e code used almost nothing of CPU and \u003cem\u003ea\u003c/em\u003e use the 400%. But actually both are using equals slice of the CPU, almost 200%.\u003c/p\u003e\n\n\u003cp\u003eMy question is, doesn't yield() works between different process? Is there a way to make it work the way I expected?\u003c/p\u003e","accepted_answer_id":"7662925","answer_count":"2","comment_count":"2","creation_date":"2011-09-22 22:48:24.233 UTC","favorite_count":"1","last_activity_date":"2011-10-05 14:28:33.94 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"809384","post_type_id":"1","score":"5","tags":"linux|multithreading|boost|g++|yield","view_count":"1235"} {"id":"8974416","title":"Visual Studio Annotate changing line number in view","body":"\u003cp\u003eI'm using Visual Studio 2010 with VB.NET, in a 4.0 project, with Team Foundation Server. When I annotate, not only do I not see line numbers (which is apparently an unfixed bug with VS 2010 - \u003ca href=\"http://connect.microsoft.com/VisualStudio/feedback/details/553557/when-invoking-tfs-annotate-in-visual-studio-there-are-no-line-numbers-shown\"\u003ehttp://connect.microsoft.com/VisualStudio/feedback/details/553557/when-invoking-tfs-annotate-in-visual-studio-there-are-no-line-numbers-shown\u003c/a\u003e) but annotate also advances the screen 10-15 lines, which makes it difficult to find the code I was actually attempting to Annotate.\u003c/p\u003e\n\n\u003cp\u003eHas anyone run into this before? Any chance you've figured out a fix for it? 
Even knowing I'm not alone would be nice.\u003c/p\u003e","accepted_answer_id":"10386733","answer_count":"2","comment_count":"0","creation_date":"2012-01-23 15:51:52.147 UTC","favorite_count":"1","last_activity_date":"2015-05-19 20:41:36.477 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"459949","post_type_id":"1","score":"6","tags":"visual-studio|visual-studio-2010|tfs|annotate","view_count":"1477"} {"id":"40316184","title":"c++ how to put a pointer to a pointer in dvc","body":"\u003cp\u003eI'm new to c++ (java programmer) and I am working on a homework assignment for an intro course. The purpose is \"Constructors, Dynamic Memory Allocation and\nOverloading Operators\" That being said, I'm really stuck on one of the specifics. \u003c/p\u003e\n\n\u003cp\u003eI'm creating 2 classes Color, and ColorBox. It was specified in the instruction that the member variables in ColorBox are int width, int height and Color** data. My understanding is that data holds reference to a 2D array of Color objects... \u003c/p\u003e\n\n\u003cp\u003eMy question is: \u003cstrong\u003eHow do I set some type of empty or basic value for data in the DVC?\u003c/strong\u003e And does anybody have a link for a decent write up on this kind of pointer? 
I've found generic write ups on arrays and pointers but I'm still having trouble wrapping my head around this.\u003c/p\u003e\n\n\u003cp\u003eThanks in advance!\u003c/p\u003e\n\n\u003cp\u003eEdit:\nI think I made it work with the code below, but I'll admit I still don't think I know what I'm doing.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eColorBlock::ColorBlock()\n{\n this-\u0026gt;width = 0;\n this-\u0026gt;height = 0;\n Color temp_data[1][1];\n this-\u0026gt;data = (Color**)temp_data;\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"5","creation_date":"2016-10-29 04:59:29.783 UTC","last_activity_date":"2016-10-30 00:19:00.54 UTC","last_edit_date":"2016-10-29 05:31:45.35 UTC","last_editor_display_name":"","last_editor_user_id":"7072116","owner_display_name":"","owner_user_id":"7072116","post_type_id":"1","score":"2","tags":"c++|c++11","view_count":"67"} @@ -1298,7 +1298,7 @@ {"id":"15574528","title":"Cross-compiling R for ARM (Raspberry Pi)","body":"\u003cp\u003eI need to build R (\u003ca href=\"http://www.r-project.org/\" rel=\"nofollow\"\u003ehttp://www.r-project.org/\u003c/a\u003e) for Arch Linux ARM running on Raspberry Pi. I am having trouble running ./configure. I have built my own toolchain using crosstool-ng and it does work, I've compiled other applications with it just fine.\u003c/p\u003e\n\n\u003cp\u003eThe issue appears to be that I cannot link the Fortran libraries to C code. Here is where configure fails:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003echecking for Fortran 77 libraries of gfortran... 
-L/home/njackson/bcm2708rpi-toolchain/lib -L/home/njackson/bcm2708rpi-toolchain/lib/gcc/arm-rpi-linux-gnueabi/4.7.3 -L/home/njackson/bcm2708rpi-toolchain/arm-rpi-linux-gnueabi/lib -L/usr/lib/gcc/x86_64-linux-gnu/4.6 -L/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/4.6/../../.. -lgfortran -lm /home/njackson/bcm2708rpi-toolchain/arm-rpi-linux-gnueabi/lib/libgfortran.a /home/njackson/bcm2708rpi-toolchain/lib/gcc/arm-rpi-linux-gnueabi/4.7.3/libgcc.a\nchecking for dummy main to link with Fortran 77 libraries... unknown\nconfigure: error: in `/home/njackson/R-2.15.3':\nconfigure: error: linking to Fortran libraries from C fails\nSee `config.log' for more details\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIt fails here.\u003c/p\u003e\n\n\u003cp\u003eI used the following configure command:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e./configure --host=arm-linux-gnueabihf CC=/home/njackson/bcm2708rpi-toolchain/bin/arm-rpi-linux-gnueabi-gcc CXX=/home/njackson/bcm2708rpi-toolchain/bin/arm-rpi-linux-gnueabi-g++ FC=/home/njackson/bcm2708rpi-toolchain/bin/arm-rpi-linux-gnueabi-gfortran MAIN_LD=/home/njackson/bcm2708rpi-toolchain/bin/arm-rpi-linux-gnueabi-ld --with-readline=no\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI'd appreciate help getting this compiled. 
Thanks.\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2013-03-22 15:49:55.44 UTC","last_activity_date":"2013-03-27 00:44:50.767 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1730271","post_type_id":"1","score":"0","tags":"c|r|arm|fortran|raspberry-pi","view_count":"704"} {"id":"19160175","title":"How to hide multiple elements","body":"\u003cp\u003eI want to hide multiple elements when I press a button which get the value of checkboxs, and if the checkbox is checked it's hide.\u003c/p\u003e\n\n\u003cp\u003eI have the next code, but it just work with the first element\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar checkedInputs = $(\"input:checked\");\nvar test = \"\";\n$.each(checkedInputs, function(i, val) {\n test += val.value+\",\";\n});\ntest = test.substring(0,(test.length-1));\n$(\"#numRow\"+test).hide('slow'); // it should to hide multiple elements, but just work with the first value\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI also tried with array, but it doen't work too.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar numMsj =[1, 2, 4, 22, 44,90, 100]; \n$.each(numMsg, function (ind, elem) { \n $(\"#numRow\"+elem).hide('slow');\n});\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"19160250","answer_count":"1","comment_count":"0","creation_date":"2013-10-03 13:14:08.09 UTC","favorite_count":"1","last_activity_date":"2013-10-03 13:30:47.633 UTC","last_edit_date":"2013-10-03 13:30:47.633 UTC","last_editor_display_name":"","last_editor_user_id":"838807","owner_display_name":"","owner_user_id":"3736926","post_type_id":"1","score":"0","tags":"javascript|jquery|arrays|checkbox","view_count":"70"} {"id":"17824786","title":"How can I edit an iframe with proxy?","body":"\u003cp\u003eI need to edit an iframe with jQuery but I'm unable to do it. 
\u003c/p\u003e\n\n\u003cp\u003eI tried with:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$(document).ready(function(){\n $(\"#iframe\").on('load',function(){\n $(this).contents().find('body').html('a');\n });\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis code doesn't work.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEDIT\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eThe iframe is related to a different domain.\u003c/p\u003e","accepted_answer_id":"17824855","answer_count":"1","comment_count":"2","creation_date":"2013-07-24 03:36:47.287 UTC","favorite_count":"1","last_activity_date":"2015-06-07 13:41:28.57 UTC","last_edit_date":"2015-06-07 13:41:28.57 UTC","last_editor_display_name":"","last_editor_user_id":"4458531","owner_display_name":"","owner_user_id":"2528167","post_type_id":"1","score":"2","tags":"php|iframe|proxy","view_count":"3800"} -{"id":"26528395","title":"How to Install and configure Redis on ElasticBeanstalk","body":"\u003cp\u003eHow do I install and configure Redis on AWS ElasticBeanstalk? Does anyone know how to write an .ebextension script to accomplish that?\u003c/p\u003e","accepted_answer_id":"27577826","answer_count":"2","comment_count":"0","creation_date":"2014-10-23 12:43:48.053 UTC","favorite_count":"1","last_activity_date":"2017-04-18 18:46:49.853 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"839818","post_type_id":"1","score":"7","tags":"redis|elastic-beanstalk","view_count":"6754"} +{"id":"26528395","title":"How to Install and configure Redis on ElasticBeanstalk","body":"\u003cp\u003eHow do I install and configure Redis on AWS ElasticBeanstalk? 
Does anyone know how to write an .ebextension script to accomplish that?\u003c/p\u003e","accepted_answer_id":"27577826","answer_count":"2","comment_count":"0","creation_date":"2014-10-23 12:43:48.053 UTC","favorite_count":"1","last_activity_date":"2017-04-18 18:46:49.853 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"839818","post_type_id":"1","score":"7","tags":"redis|elastic-beanstalk","view_count":"6754"} {"id":"44657875","title":"Offsetting HTML anchors with a fixed header on page","body":"\u003cp\u003eI am currently working on a page that has a fixed header and multiple anchor tags to jump to different sections on the page. I have searched around and found one solution that seems to work the way I would like using CSS primarily, however despite having the the anchors relatively positioned it seems to keep going back to the same location on the page. I would like to have all the links scroll appropriately down to each respective section, and I tried making separate classes for each respective link, but that just took me back to the same location on the page. \u003c/p\u003e\n\n\u003cp\u003eHere is the page in question:\n\u003ca href=\"http://www.aticourses.com/scheduleNew.html\" rel=\"nofollow noreferrer\"\u003ehttp://www.aticourses.com/scheduleNew.html\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003e...and the accompanying css\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e.anchor{\ndisplay: block;\nposition: relative;\ntop: -200px;\nvisibility: hidden;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e}\u003c/p\u003e\n\n\u003cp\u003eAm I going to have to create different anchors for each respective section?\u003c/p\u003e\n\n\u003cp\u003eThanks. 
\u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2017-06-20 15:59:44.293 UTC","last_activity_date":"2017-06-20 15:59:44.293 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"7921641","post_type_id":"1","score":"0","tags":"javascript|html|css|scroll","view_count":"19"} {"id":"253468","title":"What's the best way to get the directory from which an assembly is executing","body":"\u003cp\u003eFor my apps, I store some configuration file in xml along with the assembly(exe), and something other temporary files for proccessing purpose. \u003c/p\u003e\n\n\u003cp\u003eI found some quirk with \u003ccode\u003e\".\\\\\"\u003c/code\u003e and \u003ccode\u003eApplication.StartupPath\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eI've been using \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eString configPath = \".\\\\config.xml\";\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIt works fine until I called \u003ccode\u003eOpenFIleDialog\u003c/code\u003e to open some files in other folders, the statement above failed. Apparently \".\\\" is referring to \"CurrentDirectory\", which changes every time when we browse to another folder.\u003c/p\u003e\n\n\u003cp\u003eAt some point, I was using \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eString configPath = Path.Combine(Application.StartupPath + \"config.xml\");\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAt some point, when I need to execute this assembly from another folder by using \u003ccode\u003eProcess.Start()\u003c/code\u003e, things start to fall apart. Apparently the working directory is not set properly, and \u003ccode\u003eApplication.StartupPath\u003c/code\u003e is actually referring to working directory instead of the directory of which the assembly is executing, as I've assumed. So I have to resort to using ProcessInfo to setup the working directory to the assembly's directory. 
I also had problem with this when I was writing VSTO.\u003c/p\u003e\n\n\u003cp\u003eSo, my question is, what's the best, simplest and most assured way to get the current directory that the assembly is executing, without those quirks(or misunderstanding) that I've just mentioned?\u003c/p\u003e\n\n\u003cp\u003eEDIT: I meant to get the directory which the assembly reside\u003c/p\u003e\n\n\u003cp\u003eEDIT: According to MSDN on \u003ca href=\"http://msdn.microsoft.com/en-us/library/system.appdomain.basedirectory.aspx\" rel=\"nofollow noreferrer\"\u003eAppDomain.BaseDirectory\u003c/a\u003e, it seems that it can be changes during runtime, which is what I don't want(Just to clarify, not that I don't want to allow changing BaseDirectory, but rather, when I retrieve it without knowing for sure whether it's been changed)\u003c/p\u003e\n\n\u003cp\u003eEDIT: I've notice that a related question was posted much earlier. \u003ca href=\"https://stackoverflow.com/questions/158219/what-would-cause-the-current-directory-of-an-executing-app-to-change\"\u003eWhat would cause the current directory of an executing app to change?\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eThanks guys for the answer.\u003c/p\u003e","accepted_answer_id":"253504","answer_count":"7","comment_count":"0","creation_date":"2008-10-31 13:29:57.487 UTC","favorite_count":"6","last_activity_date":"2016-11-19 03:32:26.21 UTC","last_edit_date":"2017-05-23 12:34:09.103 UTC","last_editor_display_name":"faulty","last_editor_user_id":"-1","owner_display_name":"faulty","owner_user_id":"20007","post_type_id":"1","score":"16","tags":".net|directory","view_count":"21991"} {"id":"11674622","title":"Converting a awk 2D array with counts into hashmap in java","body":"\u003cp\u003eI found this problem so interesting. I am using an awk 2D array that has a key,value,count of the same. and that is being printed to a file. 
This file is in the below format\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eA100|B100|3\nA100|C100|2\nA100|B100|5\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNow I have a file like this .. My motive is to convert this file into a hash map so that the final output from the hash map is.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eA100|B100|8\nA100|C100|2\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eJust an aggregation\u003c/p\u003e\n\n\u003cp\u003eThe challenge is, this one has 3 dimensions and not two. I did have an another file in the below format which is \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eD100|4\nH100|5\nD100|6\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI easily aggregated the above as it is only 2D and I used the below code to do that \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eString[] fields= strLine.trim().split(\"\\\\|\");\nif(hashmap.containsKey(fields[0]))\n{\n//Update the value of the key here\nhashmap.put(fields[0],hashmap.get(fields[0]) + Integer.parseInt(fields[1]));\n}\nelse\n{\n//Inserting the key to the map\nhashmap.put(fields[0],Integer.parseInt(fields[1]));\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo this was quite simple for implementation.\u003c/p\u003e\n\n\u003cp\u003eBut when it comes to 3D I have to have an another check inside.. My idea for this is to maintain a [B100,5(beanObject[5])]\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e Map\u0026lt;String,beanClassObject\u0026gt; hashmap=new Map\u0026lt;String,beanClassObject\u0026gt;();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003esecongField hash map which has been used in the code that has a mapping relation between the created ben Object subscript and the key as the second field \"For instance it is \"\u003c/p\u003e\n\n\u003cp\u003eThis bean class would have the getter and setter method for the 2nd and 3rd fields of the file. I hope I am clear with this point. 
So the implementation of this would be \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eif(hashmap.containsKey(fields[0]))\n{\n **//Have to check whether the the particular key value pair already exists ... I dint find any method for this ... Just a normal iteration is there .. Could you ppl guide me regarding this**\n\n //Update the value of the key here\n secondFieldHashMap.get(fields[1]).COunt= secondFieldHashMap.get(fields[1]).getCOunt+ Integer.parseInt(fields[2]));\n }\n else\n {\n //Inserting the key to the map\n hashmap.put(fields[0],Integer.parseInt(fields[1]));\n secondFieldHashMap.get(fields[1]).COunt= Integer.parseInt(fields[2]));\n }\nelse\n{\n\n // This meands there is no key field\n // Hence insert the key field and also update the count of seconfFieldHashMap as done previously.\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eCOuld you ppl please throw some ideas regarding this. Thank you\u003c/p\u003e","accepted_answer_id":"11674798","answer_count":"1","comment_count":"0","creation_date":"2012-07-26 17:19:58.39 UTC","last_activity_date":"2012-07-26 19:17:50.883 UTC","last_edit_date":"2012-07-26 19:17:50.883 UTC","last_editor_display_name":"","last_editor_user_id":"620097","owner_display_name":"","owner_user_id":"1305675","post_type_id":"1","score":"0","tags":"java|awk|hashmap","view_count":"275"} @@ -1338,7 +1338,7 @@ {"id":"41320700","title":"javax/servlet/ServletContext : Unsupported major.minor version 52.0","body":"\u003cp\u003eI am trying to run simple app engine guestbook app modified for Spring\nI am running od InteliJ IDEA\u003c/p\u003e\n\n\u003cp\u003eSince it is AppEngine, I need to run on JAVA 7\nWhen I am compiling with Java 8 compiler, I have no problem. 
However when compiling with Java 7, I get this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eorg.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.test.context.web.WebDelegatingSmartContextLoader]: Constructor threw exception; nested exception is java.lang.UnsupportedClassVersionError: javax/servlet/ServletContext : Unsupported major.minor version 52.0\nat org.springframework.beans.BeanUtils.instantiateClass(BeanUtils.java:154)\nat org.springframework.beans.BeanUtils.instantiateClass(BeanUtils.java:102)\nat org.springframework.beans.BeanUtils.instantiateClass(BeanUtils.java:125)\nat org.springframework.test.context.support.AbstractTestContextBootstrapper.resolveContextLoader(AbstractTestContextBootstrapper.java:474)\nat org.springframework.test.context.support.AbstractTestContextBootstrapper.buildMergedContextConfiguration(AbstractTestContextBootstrapper.java:359)\nat org.springframework.test.context.support.AbstractTestContextBootstrapper.buildMergedContextConfiguration(AbstractTestContextBootstrapper.java:305)\nat org.springframework.test.context.support.AbstractTestContextBootstrapper.buildTestContext(AbstractTestContextBootstrapper.java:112)\nat org.springframework.test.context.TestContextManager.\u0026lt;init\u0026gt;(TestContextManager.java:120)\nat org.springframework.test.context.TestContextManager.\u0026lt;init\u0026gt;(TestContextManager.java:105)\nat org.springframework.test.context.junit4.SpringJUnit4ClassRunner.createTestContextManager(SpringJUnit4ClassRunner.java:152)\nat org.springframework.test.context.junit4.SpringJUnit4ClassRunner.\u0026lt;init\u0026gt;(SpringJUnit4ClassRunner.java:143)\nat sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\nat sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)\nat sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)\nat 
java.lang.reflect.Constructor.newInstance(Constructor.java:526)\nat org.junit.internal.builders.AnnotatedBuilder.buildRunner(AnnotatedBuilder.java:104)\nat org.junit.internal.builders.AnnotatedBuilder.runnerForClass(AnnotatedBuilder.java:86)\nat org.junit.runners.model.RunnerBuilder.safeRunnerForClass(RunnerBuilder.java:59)\nat org.junit.internal.builders.AllDefaultPossibilitiesBuilder.runnerForClass(AllDefaultPossibilitiesBuilder.java:26)\nat org.junit.runners.model.RunnerBuilder.safeRunnerForClass(RunnerBuilder.java:59)\nat org.junit.internal.requests.ClassRequest.getRunner(ClassRequest.java:33)\nat org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:250)\nat org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:141)\nat org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:112)\nat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\nat java.lang.reflect.Method.invoke(Method.java:606)\nat org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:189)\nat org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:165)\nat org.apache.maven.surefire.booter.ProviderFactory.invokeProvider(ProviderFactory.java:85)\nat org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:115)\nat org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:75)\nCaused by: java.lang.UnsupportedClassVersionError: javax/servlet/ServletContext : Unsupported major.minor version 52.0\n at java.lang.ClassLoader.defineClass1(Native Method)\n at java.lang.ClassLoader.defineClass(ClassLoader.java:800)\n at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)\n at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)\n at 
java.net.URLClassLoader.access$100(URLClassLoader.java:71)\n at java.net.URLClassLoader$1.run(URLClassLoader.java:361)\n at java.net.URLClassLoader$1.run(URLClassLoader.java:355)\n at java.security.AccessController.doPrivileged(Native Method)\n at java.net.URLClassLoader.findClass(URLClassLoader.java:354)\n at java.lang.ClassLoader.loadClass(ClassLoader.java:425)\n at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)\n at java.lang.ClassLoader.loadClass(ClassLoader.java:358)\n at org.springframework.test.context.web.WebDelegatingSmartContextLoader.\u0026lt;init\u0026gt;(WebDelegatingSmartContextLoader.java:63)\n at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\n at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)\n at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)\n at java.lang.reflect.Constructor.newInstance(Constructor.java:526)\n at org.springframework.beans.BeanUtils.instantiateClass(BeanUtils.java:142)\n ... 
32 more\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere is my Pom\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;?xml version=\"1.0\" encoding=\"UTF-8\"?\u0026gt;\u0026lt;project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\"\u0026gt;\n\n\u0026lt;modelVersion\u0026gt;4.0.0\u0026lt;/modelVersion\u0026gt;\n\u0026lt;packaging\u0026gt;war\u0026lt;/packaging\u0026gt;\n\u0026lt;version\u0026gt;0.1-SNAPSHOT\u0026lt;/version\u0026gt;\n\n\u0026lt;groupId\u0026gt;XXXXX\u0026lt;/groupId\u0026gt;\n\u0026lt;artifactId\u0026gt;XXXXX\u0026lt;/artifactId\u0026gt;\n\n\u0026lt;properties\u0026gt;\n \u0026lt;app.id\u0026gt;XXXXX\u0026lt;/app.id\u0026gt;\n \u0026lt;app.version\u0026gt;1\u0026lt;/app.version\u0026gt;\n \u0026lt;appengine.version\u0026gt;1.9.46\u0026lt;/appengine.version\u0026gt;\n \u0026lt;gcloud.plugin.version\u0026gt;2.0.9.74.v20150814\u0026lt;/gcloud.plugin.version\u0026gt;\n\n \u0026lt;objectify.version\u0026gt;5.1.13\u0026lt;/objectify.version\u0026gt;\n \u0026lt;guava.version\u0026gt;20.0\u0026lt;/guava.version\u0026gt;\n\n \u0026lt;project.build.sourceEncoding\u0026gt;UTF-8\u0026lt;/project.build.sourceEncoding\u0026gt;\n \u0026lt;maven.compiler.showDeprecation\u0026gt;true\u0026lt;/maven.compiler.showDeprecation\u0026gt;\n \u0026lt;spring.framework.version\u0026gt; 4.3.4.RELEASE\u0026lt;/spring.framework.version\u0026gt;\n \u0026lt;jackson.version\u0026gt;2.8.5\u0026lt;/jackson.version\u0026gt;\n \u0026lt;javax.servlet.api.version\u0026gt;4.0.0-b01\u0026lt;/javax.servlet.api.version\u0026gt;\n\n \u0026lt;maven.compiler.source\u0026gt;1.7\u0026lt;/maven.compiler.source\u0026gt;\n \u0026lt;maven.compiler.target\u0026gt;1.7\u0026lt;/maven.compiler.target\u0026gt;\n\u0026lt;/properties\u0026gt;\n\n\u0026lt;prerequisites\u0026gt;\n 
\u0026lt;maven\u0026gt;3.1.0\u0026lt;/maven\u0026gt;\n\u0026lt;/prerequisites\u0026gt;\n\n\u0026lt;dependencies\u0026gt;\n \u0026lt;!-- Compile/runtime dependencies --\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.google.appengine\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;appengine-api-1.0-sdk\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${appengine.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;javax.servlet\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;javax.servlet-api\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${javax.servlet.api.version}\u0026lt;/version\u0026gt;\n \u0026lt;scope\u0026gt;provided\u0026lt;/scope\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;jstl\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;jstl\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;1.2\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n\n \u0026lt;!-- [START Spring Dependencies] --\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;org.springframework\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;spring-core\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${spring.framework.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;org.springframework\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;spring-web\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${spring.framework.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;org.springframework\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;spring-webmvc\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${spring.framework.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;!-- [END 
Spring Dependencies] --\u0026gt;\n\n \u0026lt;!-- [START Objectify_Dependencies] --\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.google.guava\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;guava\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${guava.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.googlecode.objectify\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;objectify\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${objectify.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;!-- [END Objectify_Dependencies] --\u0026gt;\n\n \u0026lt;!-- Test Dependencies --\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;junit\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;junit\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;4.12\u0026lt;/version\u0026gt;\n \u0026lt;scope\u0026gt;test\u0026lt;/scope\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;org.mockito\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;mockito-all\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;2.0.2-beta\u0026lt;/version\u0026gt;\n \u0026lt;scope\u0026gt;test\u0026lt;/scope\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.google.appengine\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;appengine-testing\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${appengine.version}\u0026lt;/version\u0026gt;\n \u0026lt;scope\u0026gt;test\u0026lt;/scope\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.google.appengine\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;appengine-api-stubs\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${appengine.version}\u0026lt;/version\u0026gt;\n 
\u0026lt;scope\u0026gt;test\u0026lt;/scope\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;org.springframework\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;spring-test\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${spring.framework.version}\u0026lt;/version\u0026gt;\n \u0026lt;scope\u0026gt;test\u0026lt;/scope\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.fasterxml.jackson.core\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;jackson-core\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${jackson.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.fasterxml.jackson.core\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;jackson-databind\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${jackson.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;com.fasterxml.jackson.core\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;jackson-annotations\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${jackson.version}\u0026lt;/version\u0026gt;\n \u0026lt;/dependency\u0026gt;\n \u0026lt;dependency\u0026gt;\n \u0026lt;groupId\u0026gt;org.hamcrest\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;hamcrest-all\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;1.3\u0026lt;/version\u0026gt;\n \u0026lt;scope\u0026gt;test\u0026lt;/scope\u0026gt;\n \u0026lt;/dependency\u0026gt;\n\n\u0026lt;/dependencies\u0026gt;\n\n\u0026lt;build\u0026gt;\n \u0026lt;!-- for hot reload of the web application--\u0026gt;\n \u0026lt;outputDirectory\u0026gt;${project.build.directory}/${project.build.finalName}/WEB-INF/classes\u0026lt;/outputDirectory\u0026gt;\n \u0026lt;plugins\u0026gt;\n \u0026lt;plugin\u0026gt;\n 
\u0026lt;groupId\u0026gt;org.codehaus.mojo\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;versions-maven-plugin\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;2.1\u0026lt;/version\u0026gt;\n \u0026lt;executions\u0026gt;\n \u0026lt;execution\u0026gt;\n \u0026lt;phase\u0026gt;compile\u0026lt;/phase\u0026gt;\n \u0026lt;goals\u0026gt;\n \u0026lt;goal\u0026gt;display-dependency-updates\u0026lt;/goal\u0026gt;\n \u0026lt;goal\u0026gt;display-plugin-updates\u0026lt;/goal\u0026gt;\n \u0026lt;/goals\u0026gt;\n \u0026lt;/execution\u0026gt;\n \u0026lt;/executions\u0026gt;\n \u0026lt;/plugin\u0026gt;\n \u0026lt;plugin\u0026gt;\n \u0026lt;groupId\u0026gt;org.apache.maven.plugins\u0026lt;/groupId\u0026gt;\n \u0026lt;version\u0026gt;3.6\u0026lt;/version\u0026gt;\n \u0026lt;artifactId\u0026gt;maven-compiler-plugin\u0026lt;/artifactId\u0026gt;\n \u0026lt;configuration\u0026gt;\n \u0026lt;source\u0026gt;1.7\u0026lt;/source\u0026gt;\n \u0026lt;target\u0026gt;1.7\u0026lt;/target\u0026gt;\n \u0026lt;compilerVersion\u0026gt;1.7\u0026lt;/compilerVersion\u0026gt;\n \u0026lt;/configuration\u0026gt;\n \u0026lt;/plugin\u0026gt;\n\n \u0026lt;plugin\u0026gt;\n \u0026lt;groupId\u0026gt;org.apache.maven.plugins\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;maven-war-plugin\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;3.0.0\u0026lt;/version\u0026gt;\n \u0026lt;configuration\u0026gt;\n \u0026lt;archiveClasses\u0026gt;true\u0026lt;/archiveClasses\u0026gt;\n \u0026lt;webResources\u0026gt;\n \u0026lt;!-- in order to interpolate version from pom into appengine-web.xml --\u0026gt;\n \u0026lt;resource\u0026gt;\n \u0026lt;directory\u0026gt;${basedir}/src/main/webapp/WEB-INF\u0026lt;/directory\u0026gt;\n \u0026lt;filtering\u0026gt;true\u0026lt;/filtering\u0026gt;\n \u0026lt;targetPath\u0026gt;WEB-INF\u0026lt;/targetPath\u0026gt;\n \u0026lt;/resource\u0026gt;\n \u0026lt;/webResources\u0026gt;\n \u0026lt;/configuration\u0026gt;\n \u0026lt;/plugin\u0026gt;\n\n 
\u0026lt;plugin\u0026gt;\n \u0026lt;groupId\u0026gt;com.google.appengine\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;appengine-maven-plugin\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${appengine.version}\u0026lt;/version\u0026gt;\n \u0026lt;configuration\u0026gt;\n \u0026lt;enableJarClasses\u0026gt;false\u0026lt;/enableJarClasses\u0026gt;\n \u0026lt;version\u0026gt;${app.version}\u0026lt;/version\u0026gt;\n \u0026lt;!-- Comment in the below snippet to bind to all IPs instead of just localhost --\u0026gt;\n \u0026lt;!-- address\u0026gt;0.0.0.0\u0026lt;/address\u0026gt;\n \u0026lt;port\u0026gt;8080\u0026lt;/port --\u0026gt;\n \u0026lt;!-- Comment in the below snippet to enable local debugging with a remote debugger\n like those included with Eclipse or IntelliJ --\u0026gt;\n \u0026lt;!-- jvmFlags\u0026gt;\n \u0026lt;jvmFlag\u0026gt;-agentlib:jdwp=transport=dt_socket,address=8000,server=y,suspend=n\u0026lt;/jvmFlag\u0026gt;\n \u0026lt;/jvmFlags --\u0026gt;\n \u0026lt;/configuration\u0026gt;\n \u0026lt;/plugin\u0026gt;\n \u0026lt;plugin\u0026gt;\n \u0026lt;groupId\u0026gt;com.google.appengine\u0026lt;/groupId\u0026gt;\n \u0026lt;artifactId\u0026gt;gcloud-maven-plugin\u0026lt;/artifactId\u0026gt;\n \u0026lt;version\u0026gt;${gcloud.plugin.version}\u0026lt;/version\u0026gt;\n \u0026lt;configuration\u0026gt;\n \u0026lt;set_default\u0026gt;true\u0026lt;/set_default\u0026gt;\n \u0026lt;/configuration\u0026gt;\n \u0026lt;/plugin\u0026gt;\n \u0026lt;/plugins\u0026gt;\n\n \u0026lt;testResources\u0026gt;\n \u0026lt;testResource\u0026gt;\n \u0026lt;directory\u0026gt;${basedir}/src/main/webapp\u0026lt;/directory\u0026gt;\n \u0026lt;/testResource\u0026gt;\n \u0026lt;/testResources\u0026gt;\n\u0026lt;/build\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003c/p\u003e\n\n\u003cp\u003eMy JAVA_HOME is iset to Java7, inside project structure Project SDK is defined for Java7.\nJDK for maven importer is Java77, JRE for maven runner is set to Project 
JDK\u003c/p\u003e\n\n\u003cp\u003eCan you please tell me how it is possible that I get this error, even if I have everything set to Java 7? And also how to fix it :)\u003c/p\u003e\n\n\u003cp\u003eThanks\u003c/p\u003e","accepted_answer_id":"41320803","answer_count":"1","comment_count":"1","creation_date":"2016-12-25 11:22:53.06 UTC","last_activity_date":"2016-12-25 11:38:10.007 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2028394","post_type_id":"1","score":"-1","tags":"spring|maven|google-app-engine|servlets|intellij-idea","view_count":"181"} {"id":"10941388","title":"Can Short2 be used on WP7 for vertex positions?","body":"\u003cp\u003eI'm having trouble using Short2 for the (x,y) positions in my vertex data. This is my vertex structure:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003estruct VertexPositionShort : IVertexType\n{\n private static VertexElement[]\n vertexElements = new VertexElement[]\n {\n new VertexElement(0, VertexElementFormat.Short2, VertexElementUsage.Position, 0),\n };\n private static VertexDeclaration\n vertexDeclaration = new VertexDeclaration(vertexElements);\n\n public Short2\n Position;\n\n\n public static VertexDeclaration Declaration\n {\n get { return new VertexDeclaration(vertexElements); }\n }\n\n VertexDeclaration IVertexType.VertexDeclaration\n {\n get { return new VertexDeclaration(vertexElements); }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eUsing the WP7 emulator, nothing is drawn if I use this structure - no artifacts, nothing! However, if I use an identical structure where the Short2 structs are replaced by Vector2 then it all works perfectly.\u003c/p\u003e\n\n\u003cp\u003eI've found a reference to this being an emulator-specific issue: \"In the Windows Phone Emulator, the SkinnedEffect bone index channel must be specified as one of the integer vertex element formats - either Byte4, Short2, or Short4. 
This same set of integer data formats cannot be used for other shader input channels such as colors, positions, and texture coordinates on the emulator.\" (http://www.softpedia.com/progChangelog/Windows-Phone-Developer-Tools-Changelog-154611.html) However this is from July 2010 and I'd have assumed this limitation has been fixed by now...? Unfortunately I don't have a device to test on.\u003c/p\u003e\n\n\u003cp\u003eCan anyone confirm that this is still an issue in the emulator or point me at another reason why this is not working?\u003c/p\u003e","accepted_answer_id":"11156678","answer_count":"1","comment_count":"1","creation_date":"2012-06-07 23:48:36.68 UTC","last_activity_date":"2012-06-22 13:00:33.263 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"827029","post_type_id":"1","score":"0","tags":"c#|windows-phone-7|xna-4.0","view_count":"95"} {"id":"19751621","title":"Windows Phone App 7.5 - 8.0 migration: Resuming... screen is infinitive","body":"\u003cp\u003eAfter migrating windows phone application from 7.5 to 8.0, it starts showing \"Resuming\" screen infinitely after hitting windows button first and after that back button on one of the page.\nCan anybody help please how to troubleshoot this problem?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2013-11-03 09:46:53.6 UTC","last_activity_date":"2014-06-17 23:21:28.887 UTC","last_edit_date":"2014-06-17 23:21:28.887 UTC","last_editor_display_name":"","last_editor_user_id":"881229","owner_display_name":"","owner_user_id":"2949518","post_type_id":"1","score":"1","tags":"windows-phone-7|windows-phone-8","view_count":"73"} -{"id":"32158616","title":"Get direct url of youtube video","body":"\u003cp\u003eI wonder how to get the direct url of youtube video. 
For example, when i analyzed the video code of \u003ca href=\"https://www.youtube.com/watch?v=OrTyD7rjBpw\" rel=\"nofollow\"\u003ehttps://www.youtube.com/watch?v=OrTyD7rjBpw\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003ei found some URLs inside the js code of the player, like \u003ca href=\"https://r6---sn-w511uxa-cjoe.googlevideo.com/videoplayback?mt=1440164084\u0026amp;mv=m\u0026amp;ms=au\u0026amp;mm=31\u0026amp;mn=sn-w511uxa-cjoe\u0026amp;upn=ELDhWOVFRzM\u0026amp;id=o-AM6zxCNJwi5l5gjbq_262NpEnieXQ2iQTkGLLDieVKs4\u0026amp;ip=188.77.186.165\u0026amp;sparams=dur%2Cgcr%2Cid%2Cinitcwndbps%2Cip%2Cipbits%2Citag%2Clmt%2Cmime%2Cmm%2Cmn%2Cms%2Cmv%2Cpcm2cms%2Cpl%2Cratebypass%2Crequiressl%2Csource%2Cupn%2Cexpire\u0026amp;fexp=3300113%2C3300134%2C3300137%2C3300164%2C3310699%2C3312381%2C3312531%2C9407535%2C9408710%2C9409069%2C9412877%2C9413010%2C9414935%2C9415365%2C9415417%2C9415485%2C9416023%2C9416105%2C9416126%2C9416522%2C9417353%2C9417707%2C9418060%2C9418153%2C9418203%2C9418449%2C9419675\u0026amp;dur=0.000\u0026amp;initcwndbps=1298750\u0026amp;pl=20\u0026amp;ratebypass=yes\u0026amp;source=youtube\u0026amp;gcr=es\u0026amp;pcm2cms=yes\u0026amp;requiressl=yes\u0026amp;expire=1440185744\u0026amp;mime=video%2Fwebm\u0026amp;key=yt5\u0026amp;ipbits=0\u0026amp;lmt=1365511426344921\u0026amp;sver=3\u0026amp;itag=43\" 
rel=\"nofollow\"\u003ehttps://r6---sn-w511uxa-cjoe.googlevideo.com/videoplayback?mt=1440164084\u0026amp;mv=m\u0026amp;ms=au\u0026amp;mm=31\u0026amp;mn=sn-w511uxa-cjoe\u0026amp;upn=ELDhWOVFRzM\u0026amp;id=o-AM6zxCNJwi5l5gjbq_262NpEnieXQ2iQTkGLLDieVKs4\u0026amp;ip=188.77.186.165\u0026amp;sparams=dur%2Cgcr%2Cid%2Cinitcwndbps%2Cip%2Cipbits%2Citag%2Clmt%2Cmime%2Cmm%2Cmn%2Cms%2Cmv%2Cpcm2cms%2Cpl%2Cratebypass%2Crequiressl%2Csource%2Cupn%2Cexpire\u0026amp;fexp=3300113%2C3300134%2C3300137%2C3300164%2C3310699%2C3312381%2C3312531%2C9407535%2C9408710%2C9409069%2C9412877%2C9413010%2C9414935%2C9415365%2C9415417%2C9415485%2C9416023%2C9416105%2C9416126%2C9416522%2C9417353%2C9417707%2C9418060%2C9418153%2C9418203%2C9418449%2C9419675\u0026amp;dur=0.000\u0026amp;initcwndbps=1298750\u0026amp;pl=20\u0026amp;ratebypass=yes\u0026amp;source=youtube\u0026amp;gcr=es\u0026amp;pcm2cms=yes\u0026amp;requiressl=yes\u0026amp;expire=1440185744\u0026amp;mime=video%2Fwebm\u0026amp;key=yt5\u0026amp;ipbits=0\u0026amp;lmt=1365511426344921\u0026amp;sver=3\u0026amp;itag=43\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eBut it doesn't redirect to the youtube video, so i'm thinking that code is more obfuscated\u003c/p\u003e","answer_count":"1","comment_count":"5","creation_date":"2015-08-22 16:59:40.413 UTC","last_activity_date":"2016-03-30 03:31:36.383 UTC","last_edit_date":"2015-08-22 17:00:28.587 UTC","last_editor_display_name":"","last_editor_user_id":"4433386","owner_display_name":"","owner_user_id":"2301283","post_type_id":"1","score":"0","tags":"youtube","view_count":"10573"} +{"id":"32158616","title":"Get direct url of youtube video","body":"\u003cp\u003eI wonder how to get the direct url of youtube video. 
For example, when i analyzed the video code of \u003ca href=\"https://www.youtube.com/watch?v=OrTyD7rjBpw\" rel=\"nofollow\"\u003ehttps://www.youtube.com/watch?v=OrTyD7rjBpw\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003ei found some URLs inside the js code of the player, like \u003ca href=\"https://r6---sn-w511uxa-cjoe.googlevideo.com/videoplayback?mt=1440164084\u0026amp;mv=m\u0026amp;ms=au\u0026amp;mm=31\u0026amp;mn=sn-w511uxa-cjoe\u0026amp;upn=ELDhWOVFRzM\u0026amp;id=o-AM6zxCNJwi5l5gjbq_262NpEnieXQ2iQTkGLLDieVKs4\u0026amp;ip=188.77.186.165\u0026amp;sparams=dur%2Cgcr%2Cid%2Cinitcwndbps%2Cip%2Cipbits%2Citag%2Clmt%2Cmime%2Cmm%2Cmn%2Cms%2Cmv%2Cpcm2cms%2Cpl%2Cratebypass%2Crequiressl%2Csource%2Cupn%2Cexpire\u0026amp;fexp=3300113%2C3300134%2C3300137%2C3300164%2C3310699%2C3312381%2C3312531%2C9407535%2C9408710%2C9409069%2C9412877%2C9413010%2C9414935%2C9415365%2C9415417%2C9415485%2C9416023%2C9416105%2C9416126%2C9416522%2C9417353%2C9417707%2C9418060%2C9418153%2C9418203%2C9418449%2C9419675\u0026amp;dur=0.000\u0026amp;initcwndbps=1298750\u0026amp;pl=20\u0026amp;ratebypass=yes\u0026amp;source=youtube\u0026amp;gcr=opensearch\u0026amp;pcm2cms=yes\u0026amp;requiressl=yes\u0026amp;expire=1440185744\u0026amp;mime=video%2Fwebm\u0026amp;key=yt5\u0026amp;ipbits=0\u0026amp;lmt=1365511426344921\u0026amp;sver=3\u0026amp;itag=43\" 
rel=\"nofollow\"\u003ehttps://r6---sn-w511uxa-cjoe.googlevideo.com/videoplayback?mt=1440164084\u0026amp;mv=m\u0026amp;ms=au\u0026amp;mm=31\u0026amp;mn=sn-w511uxa-cjoe\u0026amp;upn=ELDhWOVFRzM\u0026amp;id=o-AM6zxCNJwi5l5gjbq_262NpEnieXQ2iQTkGLLDieVKs4\u0026amp;ip=188.77.186.165\u0026amp;sparams=dur%2Cgcr%2Cid%2Cinitcwndbps%2Cip%2Cipbits%2Citag%2Clmt%2Cmime%2Cmm%2Cmn%2Cms%2Cmv%2Cpcm2cms%2Cpl%2Cratebypass%2Crequiressl%2Csource%2Cupn%2Cexpire\u0026amp;fexp=3300113%2C3300134%2C3300137%2C3300164%2C3310699%2C3312381%2C3312531%2C9407535%2C9408710%2C9409069%2C9412877%2C9413010%2C9414935%2C9415365%2C9415417%2C9415485%2C9416023%2C9416105%2C9416126%2C9416522%2C9417353%2C9417707%2C9418060%2C9418153%2C9418203%2C9418449%2C9419675\u0026amp;dur=0.000\u0026amp;initcwndbps=1298750\u0026amp;pl=20\u0026amp;ratebypass=yes\u0026amp;source=youtube\u0026amp;gcr=opensearch\u0026amp;pcm2cms=yes\u0026amp;requiressl=yes\u0026amp;expire=1440185744\u0026amp;mime=video%2Fwebm\u0026amp;key=yt5\u0026amp;ipbits=0\u0026amp;lmt=1365511426344921\u0026amp;sver=3\u0026amp;itag=43\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eBut it doesn't redirect to the youtube video, so i'm thinking that code is more obfuscated\u003c/p\u003e","answer_count":"1","comment_count":"5","creation_date":"2015-08-22 16:59:40.413 UTC","last_activity_date":"2016-03-30 03:31:36.383 UTC","last_edit_date":"2015-08-22 17:00:28.587 UTC","last_editor_display_name":"","last_editor_user_id":"4433386","owner_display_name":"","owner_user_id":"2301283","post_type_id":"1","score":"0","tags":"youtube","view_count":"10573"} {"id":"5377693","title":"Android app encoding problem - not display ÅÄÖ properly","body":"\u003cp\u003eWhen user enters a value 'Nedskräpning' in a EditTextbox, it is saved as 'Nedskräpning' in the database. So I am assuming this error is generated from the Android part of the application. UPDATE: \u0026lt;-- probably false assumption? 
\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e \u0026lt;EditText android:id=\"@+id/commentTextBox\"\n android:layout_height=\"fill_parent\"\n android:layout_width=\"fill_parent\" \n android:layout_weight=\"1\"/\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe java code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ecommentTextBox = (EditText) findViewById(R.id.commentTextBox);\ncrapport.setComment(commentTextBox.getText().toString());\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThen this crapport is saved in the database. \u003c/p\u003e\n\n\u003cp\u003eAny tips on how to solve this?\u003c/p\u003e\n\n\u003cp\u003eUPDATE: This is my Apache CXF webservice:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e @Path(\"/crapportService/\")\npublic class CrapportServiceImpl implements CrapportService {\n\n @Autowired\n private CrapportController crapportController;\n\n @Override\n @Path(\"image\")\n @POST\n @Produces(MediaType.TEXT_HTML)\n public String addReport(MultipartBody multipartBody) {\n\n List\u0026lt;Attachment\u0026gt; attachmentList = new ArrayList\u0026lt;Attachment\u0026gt;();\n attachmentList = multipartBody.getAllAttachments();\n\n if (attachmentList.size() == 0){\n return \"No attachments\";\n }\n\n try {\n crapportController.processReportFromClient(attachmentList);\n } catch (FileNotFoundException e) {\n e.printStackTrace();\n return \"Error: FileNotFoundException\";\n } catch (NullPointerException e) {\n return \"Error: NullPointerException\";\n } catch (IOException e) {\n return \"Error: IOException\";\n }\n\n return \"Success\";\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"2","comment_count":"0","creation_date":"2011-03-21 12:40:40.07 UTC","favorite_count":"1","last_activity_date":"2011-03-21 15:00:40.337 UTC","last_edit_date":"2011-03-21 13:35:25.27 
UTC","last_editor_display_name":"","last_editor_user_id":"463833","owner_display_name":"","owner_user_id":"463833","post_type_id":"1","score":"0","tags":"android|mysql|utf-8|character-encoding","view_count":"1911"} {"id":"30026736","title":"DataContractSerializer - serialize string as value and not reference","body":"\u003cp\u003eI use \u003ccode\u003eDataContract\u003c/code\u003e serializer to serialize my data. I use \u003ccode\u003ePreserveObjectReferences = true\u003c/code\u003e because I need it.\nI have two objects for example:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[Datacontract]\nclass A\n{\n [DataMember] string _name;\n ...\n public A(string name)\n {\n _name = name;\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[Datacontract]\nclass B\n{\n [DataMember] string _name;\n ...\n public B(string name)\n {\n _name = name;\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBoth have \u003ccode\u003e_name\u003c/code\u003e field.\nThen I create instance of A and B where I use as a name of the second object the same name from object A:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar obj1 = new A(\"John\");\nvar obj2 = new B(obj1.Name);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThen I serialize it, and XML contains:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n\u0026lt;d11p1:_name z:Id=\"505\"\u0026gt;John\u0026lt;/d11p1:_name\u0026gt;\n..\n\u0026lt;d11p1:_name z:Ref=\"505\" i:nil=\"true\" /\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo the field \u003ccode\u003e_name\u003c/code\u003e of the second object is serialized as reference and not Value.\u003c/p\u003e\n\n\u003cp\u003eThen I don't need object A so I delete it. 
But I want to be abble to open an old saved file which contains data from class A, but in the new version of my program I don't need class A anymore.\u003c/p\u003e\n\n\u003cp\u003eThe problem is that \u003ccode\u003eDataContractSerializer\u003c/code\u003e cannot deserialize instance of B because its \u003ccode\u003e_name\u003c/code\u003e is a reference to \u003ccode\u003e_name\u003c/code\u003e of A which is not deserialized (class is deleted).\u003c/p\u003e\n\n\u003cp\u003eIs there any way to force \u003ccode\u003eDataContractSerializer\u003c/code\u003e to serialize a string as Value type instead of reference?\u003c/p\u003e\n\n\u003cp\u003eI know that one solution is not to delete class A but it contains a lot of data which isn't important in the new version of my program.\u003c/p\u003e","answer_count":"1","comment_count":"7","creation_date":"2015-05-04 09:27:57.357 UTC","last_activity_date":"2015-05-04 11:46:31.76 UTC","last_edit_date":"2015-05-04 11:46:31.76 UTC","last_editor_display_name":"","last_editor_user_id":"3599179","owner_display_name":"","owner_user_id":"4861575","post_type_id":"1","score":"2","tags":"c#|datacontractserializer","view_count":"388"} {"id":"40107205","title":"Why is GPUImage slower than CPU calculation when calculating average color of image?","body":"\u003cp\u003eIn my project I am trying to get the average color of a series of \u003ccode\u003eUIImage\u003c/code\u003e objects. 
So I implemented a category \u003ccode\u003eUIImage(AverageColor)\u003c/code\u003e to calculate the average color.\u003c/p\u003e\n\n\u003cp\u003eHere's how I do it with the \u003ccode\u003eGPUImage\u003c/code\u003e library:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e- (void)averageColor:(void (^)(UIColor *))handler {\n dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:self];\n GPUImageAverageColor *averageColorFilter = [GPUImageAverageColor new];\n [averageColorFilter setColorAverageProcessingFinishedBlock:^(CGFloat r, CGFloat g, CGFloat b, CGFloat a, CMTime frameTime) {\n UIColor *color = [UIColor colorWithRed:r green:g blue:b alpha:a];\n dispatch_async(dispatch_get_main_queue(), ^{\n handler(color);\n });\n }];\n\n [picture addTarget:averageColorFilter];\n [picture useNextFrameForImageCapture];\n [picture processImage];\n });\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI've also tried the approach (pure CPU one I think?) in \u003ca href=\"https://stackoverflow.com/a/5562246/3719276\"\u003ethis answer\u003c/a\u003e. Then I test these two methods with the same \u003ccode\u003eUIImage\u003c/code\u003e and log out the time used in each methods. And here's the result:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003ecpu time: 0.102402\u003c/p\u003e\n \n \u003cp\u003egpu time: 0.414044\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eI am surprised that the CPU one runs much faster. So what's the problem here? Am I using \u003ccode\u003eGPUImage\u003c/code\u003e in a wrong way?\u003c/p\u003e\n\n\u003chr\u003e\n\n\u003ch3\u003eEdit:\u003c/h3\u003e\n\n\u003cp\u003eThe above result was got from iOS simulator. 
When testing on real device (iPhone6s) the difference is even greater:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003ecpu time: 0.019034\u003c/p\u003e\n \n \u003cp\u003egpu time: 0.137635\u003c/p\u003e\n\u003c/blockquote\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-10-18 11:36:49.35 UTC","last_activity_date":"2016-10-27 15:04:57.813 UTC","last_edit_date":"2017-05-23 10:27:32.523 UTC","last_editor_display_name":"","last_editor_user_id":"-1","owner_display_name":"","owner_user_id":"3719276","post_type_id":"1","score":"0","tags":"ios|cocoa-touch|uiimage|core-graphics|gpuimage","view_count":"79"} @@ -1384,7 +1384,7 @@ {"id":"25354805","title":"How do I use the command \"heroku pg:transfer\"?","body":"\u003cp\u003eI am very new to heroku/ruby on rails and git. I just went through Michael Hartl's Ruby on Rails tutorial and want to push my local database to heroku but am having trouble. \u003c/p\u003e\n\n\u003cp\u003eAfter doing some research I found this article:\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"http://www.higherorderheroku.com/articles/pgtransfer-is-the-new-taps/\" rel=\"nofollow\"\u003epg transfer is the new taps\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eIt seems like it should work, but I do not understand how to set my env var \u003ccode\u003eDATABASE_URL\u003c/code\u003e:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$ env DATABASE_URL=postgres://localhost/someapp-dev heroku pg:transfer\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSpecifically I have no idea what I am supposed to directly copy and what to change. I believe I need to enter my own local host and my own database name. \u003c/p\u003e\n\n\u003cp\u003eIs this correct? If so how do I find my localhost and how do I find my database name? 
\u003c/p\u003e\n\n\u003cp\u003eMy database.yml file looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edevelopment:\n adapter: sqlite3\n database: db/development.sqlite3\n pool: 5\n timeout: 5000\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"3","creation_date":"2014-08-18 00:09:30.45 UTC","last_activity_date":"2014-08-18 01:16:13.927 UTC","last_edit_date":"2014-08-18 01:16:13.927 UTC","last_editor_display_name":"","last_editor_user_id":"585456","owner_display_name":"","owner_user_id":"3911994","post_type_id":"1","score":"1","tags":"ruby-on-rails|ruby|git|postgresql|heroku","view_count":"261"} {"id":"5491389","title":"CakePHP - Html-\u003elink - why use controller=\u003e and action=\u003e instead of just controller/action","body":"\u003cp\u003e\u003cstrong\u003eWhy this:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eecho $this-\u0026gt;Html-\u0026gt;link('Add a User', array('controller'=\u0026gt;'users', 'action'=\u0026gt;'add'));\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eInstead of just this:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eecho $this-\u0026gt;Html-\u0026gt;link('Add a User', 'users/add');\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"5491498","answer_count":"2","comment_count":"0","creation_date":"2011-03-30 19:39:32.257 UTC","last_activity_date":"2015-04-07 04:53:13.97 UTC","last_edit_date":"2014-10-09 18:32:03.317 UTC","last_editor_display_name":"","last_editor_user_id":"759866","owner_display_name":"","owner_user_id":"673664","post_type_id":"1","score":"4","tags":"php|cakephp|cakephp-1.3","view_count":"7734"} {"id":"44617797","title":"Google cloud compute engine high latency","body":"\u003cp\u003eI just started using google cloud, and I've been having issues with my instance. The connection is really unstable and has really high ping. Used speedtest to test it, and has over 150 ms. Is this normal? It shouldnt be. 
I'd appreciate anyones help.\u003c/p\u003e","answer_count":"0","comment_count":"4","creation_date":"2017-06-18 17:42:36.333 UTC","favorite_count":"1","last_activity_date":"2017-06-18 17:42:36.333 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"8179674","post_type_id":"1","score":"2","tags":"google-cloud-platform","view_count":"118"} -{"id":"14071320","title":"rotation along x and y axis","body":"\u003cp\u003eI'm using GLKit along with PowerVR library for my opengl-es 2.0 3D app. The 3D scene loads with several meshes, which simulate a garage environment. I have a car in the center of the garage. I am trying to add touch handling to the app, where the user can rotate the room around (e.g., to see all 4 walls surrounding the car). I also want to allow a rotation on the x axis, though limited to a small range. Basically they can see from a little bit of the top of the car to just above the floor level.\u003c/p\u003e\n\n\u003cp\u003eI am able to rotate on the Y OR on the X, but not both. As soon as I rotate on both axis, the car is thrown off-axis. The car isn't level with the camera anymore. 
I wish I could explain this better, but hopefully you guys will understand.\u003c/p\u003e\n\n\u003cp\u003eHere is my touches implementation:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {\n\n UITouch * touch = [touches anyObject];\n CGPoint location = [touch locationInView:self.view]; \n CGPoint lastLoc = [touch previousLocationInView:self.view];\n CGPoint diff = CGPointMake(lastLoc.x - location.x, lastLoc.y - location.y);\n\n float rotX = -1 * GLKMathDegreesToRadians(diff.x / 4.0);\n float rotY = GLKMathDegreesToRadians(diff.y / 5.0);\n\n PVRTVec3 xAxis = PVRTVec3(1, 0, 0);\n PVRTVec3 yAxis = PVRTVec3(0,1,0);\n\n PVRTMat4 yRotMatrix, xRotMatrix;\n\n // create rotation matrices with angle\n PVRTMatrixRotationXF(yRotMatrix, rotY);\n PVRTMatrixRotationYF(xRotMatrix, -rotX);\n\n _rotationY = _rotationY * yRotMatrix;\n _rotationX = _rotationX * xRotMatrix;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere's my update method:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e- (void)update {\n\n // Use the loaded effect\n m_pEffect-\u0026gt;Activate();\n\n\n PVRTVec3 vFrom, vTo, vUp;\n VERTTYPE fFOV;\n vUp.x = 0.0f;\n vUp.y = 1.0f;\n vUp.z = 0.0f;\n\n // We can get the camera position, target and field of view (fov) with GetCameraPos()\n fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0);\n\n /*\n We can build the world view matrix from the camera position, target and an up vector.\n For this we use PVRTMat4LookAtRH().\n */\n m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);\n\n // rotate the camera based on the users swipe in the X direction (THIS WORKS)\n m_mView = m_mView * _rotationX;\n\n // Calculates the projection matrix\n bool bRotate = false;\n m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)1024.0/768.0, CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI've tried multiplying the new X rotation matrix to the current scene rotation first, and 
then multiplying the new Y rotation matrix second. I've tried the reverse of that, thinking the order of multiplication was my problem. That didn't help. Then I tried adding the new X and Y rotation matrices together before multiplying to the current rotation, but that didn't work either. I feel that I'm close, but at this point I'm just out of ideas.\u003c/p\u003e\n\n\u003cp\u003eCan you guys help? Thanks. -Valerie\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUpdate:\u003c/strong\u003e In an effort to solve this, I'm trying to simplify it a little. I've updated the above code, removing any limit in the range of the Y rotation. Basically I calculate the X and Y rotation based on the user swipe on the screen. \u003c/p\u003e\n\n\u003cp\u003eIf I understand this correctly, I think I want to rotate the View matrix (camera/eye) with the calculation for the _rotationX. \u003c/p\u003e\n\n\u003cp\u003eI think I need to use the World matrix (origin 0,0,0) for the _rotationY calculation. I'll try and get some images of exactly what I'm talking about.\u003c/p\u003e","accepted_answer_id":"14084821","answer_count":"1","comment_count":"0","creation_date":"2012-12-28 14:52:53.007 UTC","last_activity_date":"2012-12-29 18:45:17.603 UTC","last_edit_date":"2012-12-29 03:59:45.12 UTC","last_editor_display_name":"","last_editor_user_id":"574623","owner_display_name":"","owner_user_id":"574623","post_type_id":"1","score":"1","tags":"ios|opengl-es-2.0|powervr-sgx","view_count":"375"} +{"id":"14071320","title":"rotation along x and y axis","body":"\u003cp\u003eI'm using GLKit along with PowerVR library for my opengl-opensearch 2.0 3D app. The 3D scene loads with several meshes, which simulate a garage environment. I have a car in the center of the garage. I am trying to add touch handling to the app, where the user can rotate the room around (e.g., to see all 4 walls surrounding the car). I also want to allow a rotation on the x axis, though limited to a small range. 
Basically they can see from a little bit of the top of the car to just above the floor level.\u003c/p\u003e\n\n\u003cp\u003eI am able to rotate on the Y OR on the X, but not both. As soon as I rotate on both axis, the car is thrown off-axis. The car isn't level with the camera anymore. I wish I could explain this better, but hopefully you guys will understand.\u003c/p\u003e\n\n\u003cp\u003eHere is my touches implementation:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {\n\n UITouch * touch = [touches anyObject];\n CGPoint location = [touch locationInView:self.view]; \n CGPoint lastLoc = [touch previousLocationInView:self.view];\n CGPoint diff = CGPointMake(lastLoc.x - location.x, lastLoc.y - location.y);\n\n float rotX = -1 * GLKMathDegreesToRadians(diff.x / 4.0);\n float rotY = GLKMathDegreesToRadians(diff.y / 5.0);\n\n PVRTVec3 xAxis = PVRTVec3(1, 0, 0);\n PVRTVec3 yAxis = PVRTVec3(0,1,0);\n\n PVRTMat4 yRotMatrix, xRotMatrix;\n\n // create rotation matrices with angle\n PVRTMatrixRotationXF(yRotMatrix, rotY);\n PVRTMatrixRotationYF(xRotMatrix, -rotX);\n\n _rotationY = _rotationY * yRotMatrix;\n _rotationX = _rotationX * xRotMatrix;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere's my update method:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e- (void)update {\n\n // Use the loaded effect\n m_pEffect-\u0026gt;Activate();\n\n\n PVRTVec3 vFrom, vTo, vUp;\n VERTTYPE fFOV;\n vUp.x = 0.0f;\n vUp.y = 1.0f;\n vUp.z = 0.0f;\n\n // We can get the camera position, target and field of view (fov) with GetCameraPos()\n fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0);\n\n /*\n We can build the world view matrix from the camera position, target and an up vector.\n For this we use PVRTMat4LookAtRH().\n */\n m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);\n\n // rotate the camera based on the users swipe in the X direction (THIS WORKS)\n m_mView = m_mView * _rotationX;\n\n // Calculates the projection 
matrix\n bool bRotate = false;\n m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)1024.0/768.0, CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI've tried multiplying the new X rotation matrix to the current scene rotation first, and then multiplying the new Y rotation matrix second. I've tried the reverse of that, thinking the order of multiplication was my problem. That didn't help. Then I tried adding the new X and Y rotation matrices together before multiplying to the current rotation, but that didn't work either. I feel that I'm close, but at this point I'm just out of ideas.\u003c/p\u003e\n\n\u003cp\u003eCan you guys help? Thanks. -Valerie\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUpdate:\u003c/strong\u003e In an effort to solve this, I'm trying to simplify it a little. I've updated the above code, removing any limit in the range of the Y rotation. Basically I calculate the X and Y rotation based on the user swipe on the screen. \u003c/p\u003e\n\n\u003cp\u003eIf I understand this correctly, I think I want to rotate the View matrix (camera/eye) with the calculation for the _rotationX. \u003c/p\u003e\n\n\u003cp\u003eI think I need to use the World matrix (origin 0,0,0) for the _rotationY calculation. I'll try and get some images of exactly what I'm talking about.\u003c/p\u003e","accepted_answer_id":"14084821","answer_count":"1","comment_count":"0","creation_date":"2012-12-28 14:52:53.007 UTC","last_activity_date":"2012-12-29 18:45:17.603 UTC","last_edit_date":"2012-12-29 03:59:45.12 UTC","last_editor_display_name":"","last_editor_user_id":"574623","owner_display_name":"","owner_user_id":"574623","post_type_id":"1","score":"1","tags":"ios|opengl-opensearch-2.0|powervr-sgx","view_count":"375"} {"id":"32443890","title":"How to make Google map fill available height between dynamic header and footer","body":"\u003cp\u003eI have a header and footer, each with dynamic content. 
This is actually an aspx page with a master page which contains header/footer content which may vary in size. I can not enforce a px height for header or footer as they may have images or just text, etc. I want to make the Google map fill the available page height (and width) between them. I'll give the map canvas a minimum height of say 200px, just in case, but otherwise it should force the footer to bottom of the page without scrolling (unless the screen is short enough for the 200px minimum to require scrolling).\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;body\u0026gt;\n\u0026lt;div id=\"wrapper\"\u0026gt;\n \u0026lt;div id=\"header\"\u0026gt;Title\u0026lt;br /\u0026gt;of\u0026lt;br /\u0026gt;variable\u0026lt;br/\u0026gt;height\u0026lt;/div\u0026gt;\n \u0026lt;div id=\"body\"\u0026gt;\n \u0026lt;div id=\"map-canvas\"\u0026gt;\u0026lt;/div\u0026gt;\n \u0026lt;/div\u0026gt;\n \u0026lt;div id=\"footer\"\u0026gt;\n Footer\u0026lt;br /\u0026gt;of\u0026lt;br /\u0026gt;variable\u0026lt;br /\u0026gt;height\n \u0026lt;/div\u0026gt;\n\u0026lt;/div\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003c/p\u003e\n\n\u003cp\u003eHere is a Fiddle showing it very close using flex approach... It seems to work in Chrome/FireFox but this does not work in IE11.\n\u003ca href=\"https://jsfiddle.net/randbrown/7dc8u6ja/4/\" rel=\"nofollow\"\u003ehttps://jsfiddle.net/randbrown/7dc8u6ja/4/\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eIs the flex-based approach best for this and if so what am I missing to get it working in IE? 
Or is there a better way to achieve this?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2015-09-07 17:53:41.91 UTC","last_activity_date":"2015-09-09 14:24:28.733 UTC","last_edit_date":"2015-09-08 03:20:14.767 UTC","last_editor_display_name":"","last_editor_user_id":"1213296","owner_display_name":"","owner_user_id":"438365","post_type_id":"1","score":"-1","tags":"html|css|google-maps|flexbox","view_count":"1187"} {"id":"46786847","title":"How can I read single-channel 32-bit integer images with python?","body":"\u003cp\u003eI want to read single-channel 32-bit integer image saved as ssv file.\nI have tried so far(see the following code) but without much success. \u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://i.stack.imgur.com/J1LEG.png\" rel=\"nofollow noreferrer\"\u003eHere is the code\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003ePlease let me know if you have any idea of what is missing?\u003c/p\u003e","accepted_answer_id":"46792308","answer_count":"1","comment_count":"2","creation_date":"2017-10-17 09:26:28.537 UTC","last_activity_date":"2017-10-17 14:14:08.91 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"8287770","post_type_id":"1","score":"-1","tags":"python|numpy|import|text-files","view_count":"27"} {"id":"24650309","title":"Update cannot proceed due to validation errors.","body":"\u003cp\u003ebelow is my code \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eCREATE TABLE [dbo].[tbl_company] (\n [Id] INT IDENTITY (1, 1) NOT NULL,\n [company_Id] INT NOT NULL,\n [typeOfCompany] VARCHAR (250) NULL,\n [ownership_Id] INT NULL, \n CONSTRAINT [PK_tbl_company] PRIMARY KEY ([Id])\n);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei am getting below error:\nThe referenced table '[dbo].[tbl_company]' contains no primary or candidate keys that match the referencing column list in the foreign key. 
If the referenced column is a computed column, it should be persisted.\u003c/p\u003e","answer_count":"1","comment_count":"1","creation_date":"2014-07-09 09:43:50.89 UTC","last_activity_date":"2014-07-09 09:50:35.91 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3819789","post_type_id":"1","score":"0","tags":"sql-server","view_count":"626"} @@ -1396,7 +1396,7 @@ {"id":"13998683","title":"Bootstrap / Dropdown Button","body":"\u003cp\u003eHi Im trying to create a dropdown button using bootstrap. But it just doesnt seem to come out correctly ?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;div class=\"btn-group\"\u0026gt;\n \u0026lt;a class=\"btn dropdown-toggle\" data-toggle=\"dropdown\" href=\"#\"\u0026gt;\n Action\n \u0026lt;span class=\"caret\"\u0026gt;\u0026lt;/span\u0026gt;\n \u0026lt;/a\u0026gt;\n \u0026lt;ul class=\"dropdown-menu\"\u0026gt;\n \u0026lt;li\u0026gt;\u0026lt;a href=\"#\"\u0026gt;Foo\u0026lt;/a\u0026gt;\u0026lt;/li\u0026gt;\n \u0026lt;li\u0026gt;\u0026lt;a href=\"#\"\u0026gt;Bar\u0026lt;/a\u0026gt;\u0026lt;/li\u0026gt;\n \u0026lt;/ul\u0026gt;\n\u0026lt;/div\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eA jsfiddle is here \u003ca href=\"http://jsfiddle.net/UrgP8/\"\u003ehttp://jsfiddle.net/UrgP8/\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eAny ideas ?\u003c/p\u003e\n\n\u003cp\u003eThanks,\u003c/p\u003e","accepted_answer_id":"13998987","answer_count":"2","comment_count":"1","creation_date":"2012-12-21 23:51:00.033 UTC","favorite_count":"1","last_activity_date":"2013-07-27 16:03:29.903 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1325133","post_type_id":"1","score":"5","tags":"html|twitter-bootstrap","view_count":"20516"} {"id":"12955999","title":"Cascading drop down list with ajax in php","body":"\u003cp\u003eI have a cascading dropdown list which is fetched from the database through ajax.\nThe list loads but its not posting to the database nor is the code seen 
behind.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunction getXMLHTTP() { //function to return the xml http object\n var xmlhttp=false; \n try{\n xmlhttp=new XMLHttpRequest();\n }\n catch(e) { \n try{ \n xmlhttp= new ActiveXObject(\"Microsoft.XMLHTTP\");\n }\n catch(e){\n try{\n xmlhttp = new ActiveXObject(\"Msxml2.XMLHTTP\");\n }\n catch(e1){\n xmlhttp=false;\n }\n }\n }\n\n return xmlhttp;\n }\n\nfunction getCity(stateid)\n{\n //alert(stateid);\n var strURL=\"findCity.php?state=\"+stateid;\n var req = getXMLHTTP();\n if (req)\n {\n req.onreadystatechange = function()\n {\n if (req.readyState == 4) // only if \"OK\"\n {\n if (req.status == 200)\n {\n document.getElementById('citydiv').innerHTML=req.responseText;\n } else {\n alert(\"There was a problem while using XMLHTTP:\\n\" + req.statusText);\n }\n }\n }\n req.open(\"GET\", strURL, true);\n req.send(null);\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand the php file\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;? $state=$_GET['state'];\n $link = mysql_connect('localhost', '', ''); //change the configuration if required\n if (!$link) {\n die('Could not connect: ' . mysql_error());\n }\n mysql_select_db('a'); //change this if required\n $query=\"select FOR_CODE,FOR_DESC from maruti_city where FOR_STAT_CODE='{$state}' order by FOR_DESC\";\n $result=mysql_query($query);?\u0026gt;\n \u0026lt;select name=\"city\" onchange=\"getDealer(this.value)\" class=\"sel\" \u0026gt;\n \u0026lt;option value=\"0\"\u0026gt;Select City\u0026lt;/option\u0026gt;\n \u0026lt;? 
while($row=mysql_fetch_array($result)) { ?\u0026gt;\n \u0026lt;option value\u0026gt;\u0026lt;?=$row['FOR_DESC']?\u0026gt;\u0026lt;/option\u0026gt;\n \u0026lt;?} ?\u0026gt;\n \u0026lt;/select\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe DDL's Load but these values are not getting posted to the database.\nform\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;div class=\"container\"\u0026gt;\n\u0026lt;table width=\"528\" border=\"0\" cellpadding=1 class=\"formTable\" style=\"width: 515px;font-family:arial;font-size:12px;\" \u0026gt;\n\u0026lt;form action=\"form_submit.php\" method=\"POST\" name=\"alto800\" id=\"alto800\" onsubmit=\"return validate();\"\u0026gt;\n \u0026lt;tbody\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td width=\"52%\"\u0026gt;Name\u0026lt;/td\u0026gt;\n \u0026lt;td width=\"48%\" \u0026gt;Mobile/Phone No.\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;\n \u0026lt;select name=\"title\" id=\"mr\" class=\"sel\"\u0026gt;\n \u0026lt;option value=\"mr\"\u0026gt;Mr.\u0026lt;/option\u0026gt;\n \u0026lt;option value=\"mrs\"\u0026gt;Mrs.\u0026lt;/option\u0026gt;\n \u0026lt;/select\u0026gt;\n \u0026lt;input type=\"text\" name=\"name\" id=\"name\" class=\"formName\" /\u0026gt;\n \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\n \u0026lt;input type=\"text\" name=\"mobile\" id=\"mobile\"/\u0026gt;\n \u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td \u0026gt;State\u0026lt;/td\u0026gt;\n \u0026lt;td \u0026gt;City\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;\n \u0026lt;select name=\"state\" id=\"state\" class=\"sel\" onchange=\"getCity(this.value)\"\u0026gt;\n \u0026lt;option value=\"0\"\u0026gt;Select state\u0026lt;/option\u0026gt;\n \u0026lt;option value=\"AN\"\u0026gt;ANDAMAN\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"AP\"\u0026gt;ANDHRA PRADESH\u0026lt;/option\u0026gt;\n\u0026lt;option 
value=\"AR\"\u0026gt;ARUNANCHAL PRADESH\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"AS\"\u0026gt;ASSAM\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"BH\"\u0026gt;BIHAR\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"CG\"\u0026gt;CHATTISGARH\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"CH\"\u0026gt;CHANDIGARH\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"DL\"\u0026gt;DELHI\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"DM\"\u0026gt;DAMAN\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"DN\"\u0026gt;DADRA \u0026amp; NAGAR HAVELI\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"GJ\"\u0026gt;GUJRAT\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"GO\"\u0026gt;GOA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"HN\"\u0026gt;HARYANA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"HP\"\u0026gt;HIMACHAL PRADESH\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"JH\"\u0026gt;JHARKHAND\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"JK\"\u0026gt;JAMMU \u0026amp; KASHMIR\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"KL\"\u0026gt;KERALA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"KT\"\u0026gt;KARNATAKA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"MH\"\u0026gt;MAHARASHTRA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"ML\"\u0026gt;MEGHALAYA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"MN\"\u0026gt;MANIPUR\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"MP\"\u0026gt;MADHYA PRADESH\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"MZ\"\u0026gt;MIZORAM\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"NG\"\u0026gt;NAGALAND\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"OS\"\u0026gt;ORISSA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"PJ\"\u0026gt;PUNJAB\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"PY\"\u0026gt;PONDICHERRY\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"RJ\"\u0026gt;RAJASTHAN\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"SK\"\u0026gt;SIKKIM\u0026lt;/option\u0026gt;\n\u0026lt;option 
value=\"TN\"\u0026gt;TAMIL NADU\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"TR\"\u0026gt;TRIPURA\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"UP\"\u0026gt;UTTAR PRADESH\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"UT\"\u0026gt;UTTARANCHAL\u0026lt;/option\u0026gt;\n\u0026lt;option value=\"WB\"\u0026gt;WEST BENGAL\u0026lt;/option\u0026gt;\n\n \u0026lt;/select\u0026gt;\n \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\u0026lt;div id=\"citydiv\"\u0026gt;\n \u0026lt;select name=\"city\" id=\"city\" class=\"sel\" onChange=\"getDealer(this.value)\" \u0026gt;\n \u0026lt;option value=\"0\"\u0026gt;Select state first\u0026lt;/option\u0026gt;\n \u0026lt;/select\u0026gt;\n \u0026lt;/div\u0026gt;\n\n \u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td \u0026gt;Preffered Dealer\u0026lt;/td\u0026gt;\n \u0026lt;td \u0026gt;\u0026amp;nbsp;\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt; \n \u0026lt;td colspan=\"2\"\u0026gt;\u0026lt;div id=\"dealerdiv\"\u0026gt;\u0026lt;select name=\"dealer\" style=\"width:500px;height:25px;\" \u0026gt;\n \u0026lt;option value=\"0\"\u0026gt;Select city first\u0026lt;/option\u0026gt;\n \u0026lt;/select\u0026gt; \u0026lt;/div\u0026gt; \u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;Email Address\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\u0026amp;nbsp;\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;\u0026lt;input type=\"text\" name=\"email\" id=\"email\" /\u0026gt;\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\u0026amp;nbsp;\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td \u0026gt;Your Query\u0026lt;/td\u0026gt;\n \u0026lt;td rowspan=\"2\" \u0026gt;\u0026lt;br /\u0026gt;\n \u0026lt;br /\u0026gt; \u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;\n \u0026lt;textarea name=\"query\" id=\"query\"\u0026gt;\u0026lt;/textarea\u0026gt;\n\n 
\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td \u0026gt;\n \u0026lt;div style=\"height:10px\"\u0026gt;\u0026lt;/div\u0026gt;\n \u0026lt;input type=\"image\" name=\"submit\" value=\"submit\" src=\"images/submit.png\" /\u0026gt;\n \u0026lt;/td\u0026gt;\n \u0026lt;td \u0026gt;\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n\n \u0026lt;/tbody\u0026gt;\n \u0026lt;/form\u0026gt;\n\u0026lt;/table\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"6","creation_date":"2012-10-18 13:36:15.2 UTC","favorite_count":"0","last_activity_date":"2013-12-23 21:02:09.017 UTC","last_edit_date":"2013-12-23 21:02:09.017 UTC","last_editor_display_name":"","last_editor_user_id":"759866","owner_display_name":"","owner_user_id":"1450352","post_type_id":"1","score":"0","tags":"php|javascript|ajax|list|drop-down-menu","view_count":"1334"} {"id":"14060927","title":"Post javascript module array as List object with ajax in asp .net mvc project","body":"\u003cp\u003eI want to send JavaScript object array as list object to server, the server side method(GetData) accepts list object with 3 elements, but all elements have null value. Any advice? 
Thanks in advance.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eAt Client:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cem\u003eUser.js\u003c/em\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edefine(function () { \n function User(name) {\n this.Name = name \n }\n return User;\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cem\u003emain.js\u003c/em\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar users = [new User('Barney'),\n new User('Cartman'),\n new User('Sheldon')];\n $.ajax({\n type: \"POST\",\n url: \"/Home/GetData\",\n data: {users: users},\n success: function (data) {\n //alert(data.Result);\n },\n dataType: \"json\"\n });\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eAt Server:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cem\u003eGetData action\u003c/em\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic void GetData(List\u0026lt;User\u0026gt; users){\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cem\u003eUser Model\u003c/em\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class User {\n public string Name { get; set; }\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"14069344","answer_count":"1","comment_count":"3","creation_date":"2012-12-27 20:32:04.027 UTC","last_activity_date":"2012-12-28 12:13:50.533 UTC","last_edit_date":"2012-12-27 20:37:21.337 UTC","last_editor_display_name":"","last_editor_user_id":"137626","owner_display_name":"","owner_user_id":"658735","post_type_id":"1","score":"0","tags":"asp.net-mvc|list|jquery","view_count":"1820"} -{"id":"40465813","title":"return hits from one bucket when doing a geodistance search in elasticsearch .net","body":"\u003cp\u003eI want to do a geosearch where it should first search for all locations within a distance of 50 meters, if more than 5 hits are found, then return those. If less than 5 hits are found I want to expand and search all locations within a distance of 400 meters. 
Again, if less than 5 hits are found I want to expand to 1000 meters but if less than 5 hits are found there I want to return those and not expand further. I don't want to return the 5 closest results, I want to return all the hits from up to the distance used.\u003c/p\u003e\n\n\u003cp\u003eI'm aggregating like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eaggregations.GeoDistance(\"nearby_locations\", g =\u0026gt; g\n .Field(f =\u0026gt; f.GeoLocations)\n .DistanceType(GeoDistanceType.Arc)\n .Unit(DistanceUnit.Meters)\n .Origin((double)position.X, (double)position.Y)\n .Ranges(\n r =\u0026gt; r.To(50),\n r =\u0026gt; r.To(400),\n r =\u0026gt; r.To(1000)));\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut I don't know how to return the hits for the first bucket that has over 5 hits. At the moment I'm checking which bucket that had more than 5 hits and then do another search on that distance.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar maxDistance = 1000;\nresponse = Search(query, skip, size, position, maxDistance);\nvar distanceBucket = response.Aggs.GeoDistance(\"nearby_locations\").Buckets\n .FirstOrDefault(x =\u0026gt; x.DocCount \u0026gt; 5);\n\nif(distanceBucket != null) {\n distanceUsed = (int)distanceBucket.To.Value;\n response = Search(query, skip, size, position, distanceUsed);\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis works, but I was wondering if there is a better way to achieve this?\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2016-11-07 12:58:45.693 UTC","last_activity_date":"2016-11-14 14:26:28.767 UTC","last_edit_date":"2016-11-14 14:26:28.767 UTC","last_editor_display_name":"","last_editor_user_id":"6344649","owner_display_name":"","owner_user_id":"6344649","post_type_id":"1","score":"5","tags":"elasticsearch|nest|elasticsearch-2.0|elasticsearch-net","view_count":"98"} +{"id":"40465813","title":"return hits from one bucket when doing a geodistance search in opensearch 
.net","body":"\u003cp\u003eI want to do a geosearch where it should first search for all locations within a distance of 50 meters, if more than 5 hits are found, then return those. If less than 5 hits are found I want to expand and search all locations within a distance of 400 meters. Again, if less than 5 hits are found I want to expand to 1000 meters but if less than 5 hits are found there I want to return those and not expand further. I don't want to return the 5 closest results, I want to return all the hits from up to the distance used.\u003c/p\u003e\n\n\u003cp\u003eI'm aggregating like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eaggregations.GeoDistance(\"nearby_locations\", g =\u0026gt; g\n .Field(f =\u0026gt; f.GeoLocations)\n .DistanceType(GeoDistanceType.Arc)\n .Unit(DistanceUnit.Meters)\n .Origin((double)position.X, (double)position.Y)\n .Ranges(\n r =\u0026gt; r.To(50),\n r =\u0026gt; r.To(400),\n r =\u0026gt; r.To(1000)));\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut I don't know how to return the hits for the first bucket that has over 5 hits. 
At the moment I'm checking which bucket that had more than 5 hits and then do another search on that distance.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar maxDistance = 1000;\nresponse = Search(query, skip, size, position, maxDistance);\nvar distanceBucket = response.Aggs.GeoDistance(\"nearby_locations\").Buckets\n .FirstOrDefault(x =\u0026gt; x.DocCount \u0026gt; 5);\n\nif(distanceBucket != null) {\n distanceUsed = (int)distanceBucket.To.Value;\n response = Search(query, skip, size, position, distanceUsed);\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis works, but I was wondering if there is a better way to achieve this?\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2016-11-07 12:58:45.693 UTC","last_activity_date":"2016-11-14 14:26:28.767 UTC","last_edit_date":"2016-11-14 14:26:28.767 UTC","last_editor_display_name":"","last_editor_user_id":"6344649","owner_display_name":"","owner_user_id":"6344649","post_type_id":"1","score":"5","tags":"opensearch|nest|opensearch-2.0|opensearch-net","view_count":"98"} {"id":"30584070","title":"How can I make a symbol point at the mouse?","body":"\u003cp\u003eI want to make a symbol rotate to point at the mouse. I'm using this function, but it doesn't work below the symbol's pivot. The inverse tan function has a range of 180 degrees right? 
So how can i get 360 degrees of movement?\u003c/p\u003e\n\n\u003cp\u003eWould I need to add an if statement to check the mouse position or is there a more elegant solution?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunction panelTrack(){\n angle = -180/Math.PI * Math.atan((mouseX - panel.x)/(mouseY - panel.y));\n panel.rotation = angle;\n trace(panel.rotation);\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"30588940","answer_count":"2","comment_count":"0","creation_date":"2015-06-01 22:02:26.943 UTC","last_activity_date":"2015-06-09 16:38:44.79 UTC","last_edit_date":"2015-06-01 22:16:49.52 UTC","last_editor_display_name":"","last_editor_user_id":"4347580","owner_display_name":"","owner_user_id":"4804026","post_type_id":"1","score":"1","tags":"actionscript-3|geometry|displayobject|angle","view_count":"57"} {"id":"22107187","title":"smarty php anchor link not working","body":"\u003cp\u003eHi I was wondering if you could help me with an issue where with my smarty php code won't work. 
The problem is the anchor tag containing all of the code in this section won't actually surround it when it is outputted to the web page.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;a href=\"mylink\"\u0026gt; //the link that does not actually work/surround the code below when outputed\n \u0026lt;div class=\"row\" {if $smarty.section.pm_loop.last}style=\"border:none;\"{/if}\u0026gt;\n \u0026lt;div class=\"f-right\" style=\"padding-right: 35px\"\u0026gt;\n \u0026lt;a href='UserMessagesNew.php?pm_id={$pms[pm_loop].pm_id}'\u0026gt;{$Application659}\u0026lt;/a\u0026gt;\u0026lt;br/\u0026gt;\n \u0026lt;a href='UserMessagesView.php?pm_id={$pms[pm_loop].pm_id}\u0026amp;task=delete'\u0026gt;{$Application660}\u0026lt;/a\u0026gt;\u0026lt;br/\u0026gt;\n \u0026lt;input type='checkbox' name='message_{$pms[pm_loop].pm_id}' value='1' style=\"margin:0; height:15px; width:15px;\"/\u0026gt;\n \u0026lt;/div\u0026gt;\n \u0026lt;a class=\"f-left\" href=\"UserMessagesView.php?pm_id={$pms[pm_loop].pm_id}\"\u0026gt;\u0026lt;img src=\"{$pms[pm_loop].pm_user-\u0026gt;user_photo('./images/nophoto.gif')}\" class='img' width=\"92px\" alt=\"{$pms[pm_loop].pm_user-\u0026gt;user_info.user_username} {$Application500}\"\u0026gt;\u0026lt;/a\u0026gt;\n \u0026lt;a href=\"#\" class=\"msg-info-c\"\u0026gt;\n \u0026lt;div class=\"msg-user-re\"\u0026gt;\u0026lt;b\u0026gt;\u0026lt;a href=\"UserMessagesView.php?pm_id={$pms[pm_loop].pm_id}\"\u0026gt;{$pms[pm_loop].pm_user-\u0026gt;user_info.user_username}\u0026lt;/a\u0026gt;\u0026lt;/b\u0026gt;\u0026lt;/div\u0026gt;\n \u0026lt;a href=\"UserMessagesView.php?pm_id={$pms[pm_loop].pm_id}\"\u0026gt;\u0026lt;div class=\"msg-datet\"\u0026gt;{$datetime-\u0026gt;cdate(\"`$setting.setting_timeformat` `$setting.setting_dateformat`\", $datetime-\u0026gt;timezone($pms[pm_loop].pm_date, $global_timezone))}\u0026lt;/div\u0026gt;\u0026lt;/a\u0026gt;\n \u0026lt;a href=\"UserMessagesView.php?pm_id={$pms[pm_loop].pm_id}\"\u0026gt;\u0026lt;div 
class=\"user-msg-c\"\u0026gt;{$pms[pm_loop].pm_body|truncate:100|choptext:75:\"\u0026lt;br\u0026gt;\"}\u0026lt;/div\u0026gt;\u0026lt;/a\u0026gt;\n \u0026lt;/a\u0026gt;\n \u0026lt;/div\u0026gt;\n\u0026lt;/a\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe output looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;a href=\"mylink\"\u0026gt;\u0026lt;/a\u0026gt;\n\u0026lt;div class=\"row\"\u0026gt;\n\n rest of content inside here\n\n\u0026lt;/div\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"22107576","answer_count":"1","comment_count":"9","creation_date":"2014-02-28 22:37:46.14 UTC","last_activity_date":"2014-02-28 23:12:47.39 UTC","last_edit_date":"2014-02-28 22:47:31.377 UTC","last_editor_display_name":"","last_editor_user_id":"3150271","owner_display_name":"","owner_user_id":"3329290","post_type_id":"1","score":"0","tags":"php|html|anchor|smarty","view_count":"858"} {"id":"45397663","title":"Route inbound calls to different SIP Trunks in Asterisk Python","body":"\u003cp\u003eI recently started a project to route inbound calls to different softphones. \u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eWhat I Did\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eI'm using Raspberry Pi to do this. So in raspberry pi I've installed asterisk and python and pyst package to connect asterisk and python. However I want to route incoming call to different softphones in the network based on caller ID. So though to use Zoiper application in several mobiles which have connected to the same Wi-Fi network. \u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eWhat I want\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eI want to know how can I use python and pyst functions or AGI functions to route incoming call to specific softphone. I know I've to make an asterisk SIP server and add SIP client info to the softphone. But I can't get a proper idea how to do that when it comes to several softphones. 
\u003c/p\u003e\n\n\u003cp\u003eAlso I'm running asterisk on freePBX and I'm using Python IDLE IDE. So I wish I could only use codes to accomplish this than setting up by freePBX web GUI. Please help. \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-07-30 07:59:08.36 UTC","last_activity_date":"2017-07-31 06:34:16.51 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"8107802","post_type_id":"1","score":"-1","tags":"python-2.7|asterisk|raspberry-pi3|freepbx","view_count":"32"} @@ -1434,7 +1434,7 @@ {"id":"10426636","title":"Why a lot of binary-tree data structures in C do not have a parent node pointer?","body":"\u003cp\u003eI'm new to C programming, and I'm learning C algorithms with C.\u003c/p\u003e\n\n\u003cp\u003eHere is my problem about how to define the binary tree \u003ccode\u003enode\u003c/code\u003e data structure.\u003c/p\u003e\n\n\u003ch2\u003eUse or NOT use a parent node pointer\u003c/h2\u003e\n\n\u003cp\u003eHere are 2 typical sample code for defining a \u003ccode\u003eNode\u003c/code\u003e data structure.\u003c/p\u003e\n\n\u003ch3\u003eWithout parent node pointer\u003c/h3\u003e\n\n\u003cpre\u003e\u003ccode\u003etypedef struct binaryTreeNode_{\n int key;\n void *data;\n binaryTreeNode_ *leftNode;\n binaryTreeNode_ *rightNode;\n} binaryTreeNode;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003ch3\u003eWith parent node pointer\u003c/h3\u003e\n\n\u003cpre\u003e\u003ccode\u003etypedef struct binaryTreeNode_{\n int key;\n void *data;\n binaryTreeNode_ *leftNode;\n binaryTreeNode_ *rightNode;\n binaryTreeNode_ *parentNode;\n} binaryTreeNode;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003ch2\u003eMy question\u003c/h2\u003e\n\n\u003cp\u003eObviously, using a node structure with a parent node pointer will make a lot of work much more easier. Like traverse a node/a tree, DFS/BFS with binary tree. 
So my question is \u003cstrong\u003ewhy there are some solutions that are based on a structure without parent node?\u003c/strong\u003e.\u003c/p\u003e\n\n\u003cp\u003eAre there any historical reasons? If simply because the limitation of RAM/DISK capacity, I think we can drop the solution that does not have a parent node, can't we? \u003c/p\u003e\n\n\u003ch2\u003eMaybe not relavent\u003c/h2\u003e\n\n\u003cp\u003eJust like \u003cem\u003eLinked List\u003c/em\u003e and \u003cem\u003eDoubly Linked List\u003c/em\u003e, should we use \u003cem\u003eDoubly Linked List\u003c/em\u003e to implement \u003ccode\u003eStack\u003c/code\u003e and \u003ccode\u003eQueue\u003c/code\u003e?\u003c/p\u003e","accepted_answer_id":"10426763","answer_count":"7","comment_count":"0","creation_date":"2012-05-03 07:03:44.287 UTC","last_activity_date":"2017-05-16 06:30:20.027 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"206820","post_type_id":"1","score":"6","tags":"c|algorithm|binary-tree","view_count":"3320"} {"id":"11928886","title":"How to create a folder in svn repository on fedora","body":"\u003cp\u003eHi i did't used any version control system until now so basically i am newbie in using it.\u003c/p\u003e\n\n\u003cp\u003ePresently the os i am using is Fedora and svn for version controlling, suppose i had given below link to access files\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ehttp://example-theory.com/svn-repos/files/ \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen i clicked on it, i had given username and password and able to access all the files in it. 
for example the format is as below\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003esvn-repos - Revision 4: /files\n\n..\nExamplefolder_1/\nNewfolder_2/\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut i want to create a separate folder with name \u003ccode\u003elatest_files\u003c/code\u003e and need to copy some \u003ccode\u003epdf files\u003c/code\u003e and \u003ccode\u003e.py\u003c/code\u003e in to it.\u003c/p\u003e\n\n\u003cp\u003eHow can i create a folder in svn repository\u003cbr\u003e\nHow to copy the pdf and other files in to it.\u003c/p\u003e\n\n\u003cp\u003eThanks in advance.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEdited Code:\u003c/strong\u003e\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003eInstalled subversion \u003c/li\u003e\n\u003cli\u003eTried to creater a repository with this command \u003ccode\u003esvnadmin create svn\u003c/code\u003e \nCreated a folde svn\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eTried to make a directory inside the folder \u003ccode\u003esvn\u003c/code\u003e with name \u003ccode\u003efolder_example\u003c/code\u003e with the following command \u003c/p\u003e\n\n\u003cp\u003esvn mkdir folder_example\u003c/p\u003e\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eI recieved the following error\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003esvn mkdir folder_example\nsvn: '.' 
is not a working copy\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMy intension is to create a folder and import all the files from the link \u003ccode\u003ehttp://example-theory.com/svn-repos/files/\u003c/code\u003e and update and commit with changes in that.\u003c/p\u003e","accepted_answer_id":"11929194","answer_count":"1","comment_count":"5","creation_date":"2012-08-13 06:02:07.21 UTC","last_activity_date":"2012-08-13 07:12:13.553 UTC","last_edit_date":"2012-08-13 07:12:13.553 UTC","last_editor_display_name":"","last_editor_user_id":"1342109","owner_display_name":"","owner_user_id":"1342109","post_type_id":"1","score":"0","tags":"svn|fedora","view_count":"1355"} {"id":"31215791","title":"Redirection to index in Django and Angular","body":"\u003cp\u003eI have a project with Django and Angular.\u003c/p\u003e\n\n\u003cp\u003eI have a page located at \u003ca href=\"http://localhost:8000/install/#/\" rel=\"nofollow\"\u003ehttp://localhost:8000/install/#/\u003c/a\u003e, that is part of the \u003cem\u003einstall\u003c/em\u003e app. 
There I have a button which POST a form, then I want the function in my controller to redirect to \u003ca href=\"http://localhost:8000/\" rel=\"nofollow\"\u003ehttp://localhost:8000/\u003c/a\u003e, my index page.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$http.post('install/send_install_form/', installation) //installation is a dictionary\n .then(function(data){\n alert('You will be redirected')\n setTimeout(function(){\n // REDIRECTION TO THE INDEX..?\n },\n 4000\n )\n },\n function(data){\n alert(\"Error\")\n })\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow do we link between and out of apps with Django-Angular?\u003c/p\u003e\n\n\u003cp\u003eThank you!\u003c/p\u003e","accepted_answer_id":"31216099","answer_count":"1","comment_count":"0","creation_date":"2015-07-04 00:48:54.593 UTC","last_activity_date":"2015-07-04 01:49:48.083 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5079316","post_type_id":"1","score":"1","tags":"angularjs|django|redirect|indexing","view_count":"316"} -{"id":"36737772","title":"Invalid JSON when attributesFormat=object is used in the tourguide","body":"\u003cp\u003eWe found an issue with \u003ccode\u003eattributesFormat=object\u003c/code\u003e while testing the tourguide application \u003ca href=\"https://github.com/Fiware/tutorials.TourGuide-App\" rel=\"nofollow\"\u003ehttps://github.com/Fiware/tutorials.TourGuide-App\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eIf we perform the following request:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ecurl \u0026lt;cb_host\u0026gt;:\u0026lt;cb_port\u0026gt;/v1/contextEntities/type/Restaurant/id/Elizalde -s -S --header 'Content-Type: application/json' --header 'x-auth-token:\u0026lt;token\u0026gt;' --header 'Fiware-service: tourguide' --header 'Accept: application/json'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewe get this valid JSON:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"contextElement\" : {\n \"type\" : \"Restaurant\",\n 
\"isPattern\" : \"false\",\n \"id\" : \"Elizalde\",\n \"attributes\" : [\n {\n \"name\" : \"address\",\n \"type\" : \"\",\n \"value\" : {\n \"type\" : \"postalAddress\",\n \"streetAddress\" : \"Cuesta de las Cabras Aldapa 2\",\n \"addressRegion\" : \"Araba\",\n \"addressLocality\" : \"Alegría-Dulantzi\",\n \"postalCode\" : \"01240\"\n }\n },\n {\n \"name\" : \"aggregateRating\",\n \"type\" : \"\",\n \"value\" : {\n \"reviewCount\" : 1,\n \"ratingValue\" : 3\n }\n },\n {\n \"name\" : \"capacity\",\n \"type\" : \"PropertyValue\",\n \"value\" : 120,\n \"metadatas\" : [\n {\n \"name\" : \"name\",\n \"type\" : \"\",\n \"value\" : \"capacity\"\n }\n ]\n },\n {\n \"name\" : \"department\",\n \"type\" : \"\",\n \"value\" : \"Franchise3\"\n },\n {\n \"name\" : \"description\",\n \"type\" : \"\",\n \"value\" : \"Restaurante de estilo sidrería ubicado en Alegria-Dulantzi. Además de su menú del día y carta, también ofrece menú de sidrería. El menú del día cuesta 9 euros. Los fines de semana la especialidad de la casa son las alubias con sacramentos. En lo que a bebidas se refiere, hay una amplia selección además de la sidra. Cabe destacar que se puede hacer txotx. 
La capacidad del establecimiento es de 50 personas pero la sidrería no dispone de aparcamiento.%5cn%5cnHORARIO: %5cn%5cnLunes a domingo: 9:00-17:00 y 19:00-23:00.\"\n },\n {\n \"name\" : \"occupancyLevels\",\n \"type\" : \"PropertyValue\",\n \"value\" : 0,\n \"metadatas\" : [\n {\n \"name\" : \"timestamp\",\n \"type\" : \"\",\n \"value\" : \"\"\n },\n {\n \"name\" : \"name\",\n \"type\" : \"\",\n \"value\" : \"occupancyLevels\"\n }\n ]\n },\n {\n \"name\" : \"position\",\n \"type\" : \"coords\",\n \"value\" : \"42.8404625, -2.5123277\",\n \"metadatas\" : [\n {\n \"name\" : \"location\",\n \"type\" : \"string\",\n \"value\" : \"WGS84\"\n }\n ]\n },\n {\n \"name\" : \"priceRange\",\n \"type\" : \"\",\n \"value\" : 0\n },\n {\n \"name\" : \"telephone\",\n \"type\" : \"\",\n \"value\" : \"945 400 868\"\n }\n ]\n },\n \"statusCode\" : {\n \"code\" : \"200\",\n \"reasonPhrase\" : \"OK\"\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut if we use the attributesFormat=object:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ecurl \u0026lt;cb_host\u0026gt;:\u0026lt;cb_port\u0026gt;/v1/contextEntities/type/Restaurant/id/Elizalde?attributesFormat=object -s -S --header 'Content-Type: application/json' --header 'x-auth-token:\u0026lt;token\u0026gt;' --header 'Fiware-service: tourguide' --header 'Accept: application/json'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewe get this invalid JSON:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"contextElement\": {\n \"type\": \"Restaurant\",\n \"isPattern\": \"false\",\n \"id\": \"Elizalde\",\n \"attributes\": {\n \"address\": {\n \"type\": \"\",\n \"value\": {\n \"type\": \"postalAddress\",\n \"streetAddress\": \"Cuesta de las Cabras Aldapa 2\",\n \"addressRegion\": \"Araba\",\n \"addressLocality\": \"Alegría-Dulantzi\",\n \"postalCode\": \"01240\"\n }\n },\n \"aggregateRating\": {\n \"type\": \"\",\n \"value\": {\n \"reviewCount\": 1,\n \"ratingValue\": 3\n }\n },\n \"capacity\": {\n \"type\": 
\"PropertyValue\",\n \"120\",\n \"metadatas\": [{\n \"name\": \"name\",\n \"type\": \"\",\n \"value\": \"capacity\"\n }]\n },\n \"department\": {\n \"type\": \"\",\n \"value\": \"Franchise3\"\n },\n \"description\": {\n \"type\": \"\",\n \"value\": \"Restaurante de estilo sidrería ubicado en Alegria-Dulantzi. Además de su menú del día y carta, también ofrece menú de sidrería. El menú del día cuesta 9 euros. Los fines de semana la especialidad de la casa son las alubias con sacramentos. En lo que a bebidas se refiere, hay una amplia selección además de la sidra. Cabe destacar que se puede hacer txotx. La capacidad del establecimiento es de 50 personas pero la sidrería no dispone de aparcamiento.%5cn%5cnHORARIO: %5cn%5cnLunes a domingo: 9:00-17:00 y 19:00-23:00.\"\n },\n \"occupancyLevels\": {\n \"type\": \"PropertyValue\",\n \"0\",\n \"metadatas\": [{\n \"name\": \"timestamp\",\n \"type\": \"\",\n \"value\": \"\"\n }, {\n \"name\": \"name\",\n \"type\": \"\",\n \"value\": \"occupancyLevels\"\n }]\n },\n \"position\": {\n \"type\": \"coords\",\n \"value\": \"42.8404625, -2.5123277\",\n \"metadatas\": [{\n \"name\": \"location\",\n \"type\": \"string\",\n \"value\": \"WGS84\"\n }]\n },\n \"priceRange\": {\n \"type\": \"\",\n \"0\"\n },\n \"telephone\": {\n \"type\": \"\",\n \"value\": \"945 400 868\"\n }\n }\n },\n \"statusCode\": {\n \"code\": \"200\",\n \"reasonPhrase\": \"OK\"\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSteps to replicate:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eClone the tourguide repository.\u003c/li\u003e\n\u003cli\u003eDeploy the docker containers: \u003ccode\u003ecd fiware-devguide-app/docker/compose\u003c/code\u003e and \u003ccode\u003edocker-compose -f docker-compose.yml up\u003c/code\u003e\u003c/li\u003e\n\u003cli\u003eGet token as explained here : \u003ca href=\"https://github.com/Fiware/tutorials.TourGuide-App#how-to-retrieve-an-oauth-token-to-use-the-api\" 
rel=\"nofollow\"\u003ehttps://github.com/Fiware/tutorials.TourGuide-App#how-to-retrieve-an-oauth-token-to-use-the-api\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003ePerform the specified requests.\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eInformation about orion version: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;orion\u0026gt;\n \u0026lt;version\u0026gt;0.28.0\u0026lt;/version\u0026gt;\n \u0026lt;uptime\u0026gt;0 d, 1 h, 12 m, 25 s\u0026lt;/uptime\u0026gt;\n \u0026lt;git_hash\u0026gt;aaf8020a5de680b6d7e0c00c70cf425bcc4f39c8\u0026lt;/git_hash\u0026gt;\n \u0026lt;compile_time\u0026gt;Mon Mar 21 13:20:37 UTC 2016\u0026lt;/compile_time\u0026gt;\n \u0026lt;compiled_by\u0026gt;root\u0026lt;/compiled_by\u0026gt;\n \u0026lt;compiled_in\u0026gt;838a42ae8431\u0026lt;/compiled_in\u0026gt;\n\u0026lt;/orion\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"36748793","answer_count":"1","comment_count":"1","creation_date":"2016-04-20 08:16:49.917 UTC","last_activity_date":"2016-04-20 15:40:11.823 UTC","last_edit_date":"2016-04-20 08:49:42.69 UTC","last_editor_display_name":"","last_editor_user_id":"6172121","owner_display_name":"","owner_user_id":"6172121","post_type_id":"1","score":"1","tags":"fiware-orion","view_count":"35"} +{"id":"36737772","title":"Invalid JSON when attributesFormat=object is used in the tourguide","body":"\u003cp\u003eWe found an issue with \u003ccode\u003eattributesFormat=object\u003c/code\u003e while testing the tourguide application \u003ca href=\"https://github.com/Fiware/tutorials.TourGuide-App\" rel=\"nofollow\"\u003ehttps://github.com/Fiware/tutorials.TourGuide-App\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eIf we perform the following request:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ecurl \u0026lt;cb_host\u0026gt;:\u0026lt;cb_port\u0026gt;/v1/contextEntities/type/Restaurant/id/Elizalde -s -S --header 'Content-Type: application/json' --header 'x-auth-token:\u0026lt;token\u0026gt;' --header 'Fiware-service: 
tourguide' --header 'Accept: application/json'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewe get this valid JSON:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"contextElement\" : {\n \"type\" : \"Restaurant\",\n \"isPattern\" : \"false\",\n \"id\" : \"Elizalde\",\n \"attributes\" : [\n {\n \"name\" : \"address\",\n \"type\" : \"\",\n \"value\" : {\n \"type\" : \"postalAddress\",\n \"streetAddress\" : \"Cuesta de las Cabras Aldapa 2\",\n \"addressRegion\" : \"Araba\",\n \"addressLocality\" : \"Alegría-Dulantzi\",\n \"postalCode\" : \"01240\"\n }\n },\n {\n \"name\" : \"aggregateRating\",\n \"type\" : \"\",\n \"value\" : {\n \"reviewCount\" : 1,\n \"ratingValue\" : 3\n }\n },\n {\n \"name\" : \"capacity\",\n \"type\" : \"PropertyValue\",\n \"value\" : 120,\n \"metadatas\" : [\n {\n \"name\" : \"name\",\n \"type\" : \"\",\n \"value\" : \"capacity\"\n }\n ]\n },\n {\n \"name\" : \"department\",\n \"type\" : \"\",\n \"value\" : \"Franchise3\"\n },\n {\n \"name\" : \"description\",\n \"type\" : \"\",\n \"value\" : \"Restaurante de estilo sidrería ubicado en Alegria-Dulantzi. Además de su menú del día y carta, también ofrece menú de sidrería. El menú del día cuesta 9 euros. Los fines de semana la especialidad de la casa son las alubias con sacramentos. En lo que a bebidas se refiere, hay una amplia selección además de la sidra. Cabe destacar que se puede hacer txotx. 
La capacidad del establecimiento opensearch de 50 personas pero la sidrería no dispone de aparcamiento.%5cn%5cnHORARIO: %5cn%5cnLunes a domingo: 9:00-17:00 y 19:00-23:00.\"\n },\n {\n \"name\" : \"occupancyLevels\",\n \"type\" : \"PropertyValue\",\n \"value\" : 0,\n \"metadatas\" : [\n {\n \"name\" : \"timestamp\",\n \"type\" : \"\",\n \"value\" : \"\"\n },\n {\n \"name\" : \"name\",\n \"type\" : \"\",\n \"value\" : \"occupancyLevels\"\n }\n ]\n },\n {\n \"name\" : \"position\",\n \"type\" : \"coords\",\n \"value\" : \"42.8404625, -2.5123277\",\n \"metadatas\" : [\n {\n \"name\" : \"location\",\n \"type\" : \"string\",\n \"value\" : \"WGS84\"\n }\n ]\n },\n {\n \"name\" : \"priceRange\",\n \"type\" : \"\",\n \"value\" : 0\n },\n {\n \"name\" : \"telephone\",\n \"type\" : \"\",\n \"value\" : \"945 400 868\"\n }\n ]\n },\n \"statusCode\" : {\n \"code\" : \"200\",\n \"reasonPhrase\" : \"OK\"\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut if we use the attributesFormat=object:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ecurl \u0026lt;cb_host\u0026gt;:\u0026lt;cb_port\u0026gt;/v1/contextEntities/type/Restaurant/id/Elizalde?attributesFormat=object -s -S --header 'Content-Type: application/json' --header 'x-auth-token:\u0026lt;token\u0026gt;' --header 'Fiware-service: tourguide' --header 'Accept: application/json'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewe get this invalid JSON:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"contextElement\": {\n \"type\": \"Restaurant\",\n \"isPattern\": \"false\",\n \"id\": \"Elizalde\",\n \"attributes\": {\n \"address\": {\n \"type\": \"\",\n \"value\": {\n \"type\": \"postalAddress\",\n \"streetAddress\": \"Cuesta de las Cabras Aldapa 2\",\n \"addressRegion\": \"Araba\",\n \"addressLocality\": \"Alegría-Dulantzi\",\n \"postalCode\": \"01240\"\n }\n },\n \"aggregateRating\": {\n \"type\": \"\",\n \"value\": {\n \"reviewCount\": 1,\n \"ratingValue\": 3\n }\n },\n \"capacity\": {\n \"type\": 
\"PropertyValue\",\n \"120\",\n \"metadatas\": [{\n \"name\": \"name\",\n \"type\": \"\",\n \"value\": \"capacity\"\n }]\n },\n \"department\": {\n \"type\": \"\",\n \"value\": \"Franchise3\"\n },\n \"description\": {\n \"type\": \"\",\n \"value\": \"Restaurante de estilo sidrería ubicado en Alegria-Dulantzi. Además de su menú del día y carta, también ofrece menú de sidrería. El menú del día cuesta 9 euros. Los fines de semana la especialidad de la casa son las alubias con sacramentos. En lo que a bebidas se refiere, hay una amplia selección además de la sidra. Cabe destacar que se puede hacer txotx. La capacidad del establecimiento opensearch de 50 personas pero la sidrería no dispone de aparcamiento.%5cn%5cnHORARIO: %5cn%5cnLunes a domingo: 9:00-17:00 y 19:00-23:00.\"\n },\n \"occupancyLevels\": {\n \"type\": \"PropertyValue\",\n \"0\",\n \"metadatas\": [{\n \"name\": \"timestamp\",\n \"type\": \"\",\n \"value\": \"\"\n }, {\n \"name\": \"name\",\n \"type\": \"\",\n \"value\": \"occupancyLevels\"\n }]\n },\n \"position\": {\n \"type\": \"coords\",\n \"value\": \"42.8404625, -2.5123277\",\n \"metadatas\": [{\n \"name\": \"location\",\n \"type\": \"string\",\n \"value\": \"WGS84\"\n }]\n },\n \"priceRange\": {\n \"type\": \"\",\n \"0\"\n },\n \"telephone\": {\n \"type\": \"\",\n \"value\": \"945 400 868\"\n }\n }\n },\n \"statusCode\": {\n \"code\": \"200\",\n \"reasonPhrase\": \"OK\"\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSteps to replicate:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eClone the tourguide repository.\u003c/li\u003e\n\u003cli\u003eDeploy the docker containers: \u003ccode\u003ecd fiware-devguide-app/docker/compose\u003c/code\u003e and \u003ccode\u003edocker-compose -f docker-compose.yml up\u003c/code\u003e\u003c/li\u003e\n\u003cli\u003eGet token as explained here : \u003ca href=\"https://github.com/Fiware/tutorials.TourGuide-App#how-to-retrieve-an-oauth-token-to-use-the-api\" 
rel=\"nofollow\"\u003ehttps://github.com/Fiware/tutorials.TourGuide-App#how-to-retrieve-an-oauth-token-to-use-the-api\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003ePerform the specified requests.\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eInformation about orion version: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;orion\u0026gt;\n \u0026lt;version\u0026gt;0.28.0\u0026lt;/version\u0026gt;\n \u0026lt;uptime\u0026gt;0 d, 1 h, 12 m, 25 s\u0026lt;/uptime\u0026gt;\n \u0026lt;git_hash\u0026gt;aaf8020a5de680b6d7e0c00c70cf425bcc4f39c8\u0026lt;/git_hash\u0026gt;\n \u0026lt;compile_time\u0026gt;Mon Mar 21 13:20:37 UTC 2016\u0026lt;/compile_time\u0026gt;\n \u0026lt;compiled_by\u0026gt;root\u0026lt;/compiled_by\u0026gt;\n \u0026lt;compiled_in\u0026gt;838a42ae8431\u0026lt;/compiled_in\u0026gt;\n\u0026lt;/orion\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"36748793","answer_count":"1","comment_count":"1","creation_date":"2016-04-20 08:16:49.917 UTC","last_activity_date":"2016-04-20 15:40:11.823 UTC","last_edit_date":"2016-04-20 08:49:42.69 UTC","last_editor_display_name":"","last_editor_user_id":"6172121","owner_display_name":"","owner_user_id":"6172121","post_type_id":"1","score":"1","tags":"fiware-orion","view_count":"35"} {"id":"45780087","title":"How to implement Processor API with Exactly-once mode","body":"\u003cp\u003eI'm studying Kafka Stream and using Processor API to implement my use case. The code below shows the Process method which forwards a message downstream and aborts before calling \u003ccode\u003ecommit\u003c/code\u003e. This causes the stream to be reprocessed and duplicates the message on the Sink.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic void process(String key, String value) {\n\n context.forward(key, value);\n\n .. 
\n ..\n //killed\n\n context.commit();\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eprocessing.guarantee parameter:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003estreamsConfiguration.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to apply the forwarding only when invoking \u003ccode\u003ecommit\u003c/code\u003e statement. If not, what is the correct approach to implement Exactly-once mode.\u003c/p\u003e\n\n\u003cp\u003eThank you\u003c/p\u003e","accepted_answer_id":"45783581","answer_count":"2","comment_count":"0","creation_date":"2017-08-20 08:31:56.173 UTC","last_activity_date":"2017-08-20 15:07:08.43 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3812692","post_type_id":"1","score":"1","tags":"apache-kafka|apache-kafka-streams","view_count":"90"} {"id":"8595199","title":"Batch script to prefix file names","body":"\u003cp\u003eI am trying to rename files in a batch script like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erename %FOLDER%\\* 1-*\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut when I run the script It overwrites the first two characters of the original names with the prefix \"1-\" instead of adding it to the beginning of the file names. How can I work around this? 
\u003c/p\u003e","accepted_answer_id":"8595296","answer_count":"2","comment_count":"0","creation_date":"2011-12-21 19:26:20.763 UTC","favorite_count":"1","last_activity_date":"2015-03-03 10:39:20.033 UTC","last_edit_date":"2014-09-26 00:44:54.087 UTC","last_editor_display_name":"","last_editor_user_id":"321731","owner_display_name":"","owner_user_id":"1110477","post_type_id":"1","score":"4","tags":"batch-file|file-io|prefix","view_count":"9896"} {"id":"45409518","title":"Null values ('\\0') are skipped while reading .dat file in AX 2012","body":"\u003cp\u003eI am trying to read a \u003ccode\u003e.dat\u003c/code\u003e file in \u003ccode\u003eAX 2012\u003c/code\u003e through \u003ccode\u003eSystem.IO.StreamReader\u003c/code\u003e. It has values separated by commas. Now, the problem is one of the values is an empty-space and any value after the empty space is not being read.\u003c/p\u003e\n\n\u003cp\u003eEg. \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ethe following line : a,b,c, ,d,e,f\nthe values picked : a,b,c, \nI opened the file with notepad ++ and the empty space is actually '\\0' (NULL).\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAny workarounds for this?\u003c/p\u003e\n\n\u003cp\u003eHelp would be much appreciated.\u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2017-07-31 07:12:12.657 UTC","last_activity_date":"2017-07-31 09:30:58.807 UTC","last_edit_date":"2017-07-31 09:30:58.807 UTC","last_editor_display_name":"","last_editor_user_id":"3151675","owner_display_name":"","owner_user_id":"8392219","post_type_id":"1","score":"0","tags":"microsoft-dynamics","view_count":"10"} @@ -1580,7 +1580,7 @@ {"id":"23553061","title":"How to call a route by its name from inside a handler?","body":"\u003cp\u003eHow do I properly refer to route names from inside handlers?\u003cbr\u003e\nShould \u003ccode\u003emux.NewRouter()\u003c/code\u003e be assigned globally instead of standing inside a 
function?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunc AnotherHandler(writer http.ResponseWriter, req *http.Request) {\n url, _ := r.Get(\"home\") // I suppose this 'r' should refer to the router\n http.Redirect(writer, req, url, 302)\n}\n\nfunc main() {\n r := mux.NewRouter()\n r.HandleFunc(\"/\", HomeHandler).Name(\"home\")\n r.HandleFunc(\"/nothome/\", AnotherHandler).Name(\"another\")\n http.Handle(\"/\", r)\n http.ListenAndServe(\":8000\", nil)\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"23554021","answer_count":"1","comment_count":"0","creation_date":"2014-05-08 21:41:03.08 UTC","favorite_count":"1","last_activity_date":"2014-05-08 23:00:14.37 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1606248","post_type_id":"1","score":"2","tags":"go|mux|gorilla","view_count":"1434"} {"id":"24844786","title":"How to use post from HttpPost in 4.4.2?","body":"\u003cp\u003eI'm working for hours on this problem but got totally stucked. The following code is running perfectly on my test device with Android 4.1.2, but I can't get it to work with the second device\nrunning 4.4.2. I read moving from \u003ccode\u003eApache httpClient\u003c/code\u003e to \u003ccode\u003eHttpURLConnection\u003c/code\u003e solves the problem for some people, but I am also using the \u003ccode\u003eGET\u003c/code\u003e-method wich works fine. I already tried to add headers, as mentioned on sites I found. That did not work. So can you please help me in getting this working on Android 4.4.2 or give me a hint to the right direction? 
Thanks.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e List\u0026lt;NameValuePair\u0026gt; params = new ArrayList\u0026lt;NameValuePair\u0026gt;();\n.\n.\n.\n DefaultHttpClient httpClient = new DefaultHttpClient();\n HttpPost httpPost = new HttpPost(url); \n httpPost.setEntity(new UrlEncodedFormEntity(params));\n HttpResponse httpResponse = httpClient.execute(httpPost);\n HttpEntity httpEntity = httpResponse.getEntity();\n is = httpEntity.getContent();\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"5","creation_date":"2014-07-19 20:40:39.657 UTC","last_activity_date":"2014-08-19 04:21:09.687 UTC","last_edit_date":"2014-07-19 20:52:39.177 UTC","last_editor_display_name":"","last_editor_user_id":"1567835","owner_display_name":"","owner_user_id":"3856650","post_type_id":"1","score":"0","tags":"json|apache|httpurlconnection|android-4.4-kitkat","view_count":"494"} {"id":"9298909","title":"Exception thrown while logging in to project in windows 7 but fine with XP","body":"\u003cp\u003eI'm a final year student learning and trying to build a Java project and I've got a project in Java and MSAccess (jdbc). Th project is basically done but only executing fine under windows XP \u0026amp; jdk1.5. But I use windows7 64bit OS and installed jdk1.7. But I'm not able to login to the project. I've done those odbc - system dsn creation procedure both by *.mdb in Access02-03 \u0026amp; in \u003cem\u003e.mdb,\u003c/em\u003e.accdb. but having the same Exception \"unable to connect to the database\". The login gui is taking the value of UserName and password, but as I press the login button it's throwing the exception. It is created in netbeans, though I have the latest version of netbeans installed in my system.It is throwing the same Exception from the commandline as well from netbeans. I have checked all those codings, dsn name, tablenames, ield names, but we all know that Java is a completely platform independent language. 
So I think there won't be any issues with the version of OS or JDK installed on the system.\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2012-02-15 18:14:32.923 UTC","last_activity_date":"2012-02-16 10:52:02.097 UTC","last_edit_date":"2012-02-16 10:52:02.097 UTC","last_editor_display_name":"","last_editor_user_id":"21234","owner_display_name":"","owner_user_id":"1211998","post_type_id":"1","score":"0","tags":"java|windows|ms-access","view_count":"35"} -{"id":"29514583","title":"FInd a document several times","body":"\u003cp\u003eI have a list of events in a city, and I display for the city all incoming events, sorted by the start date of the event.\u003c/p\u003e\n\n\u003cp\u003eToday I need to add a new feature : some events can be repeated over time, for exemple a flea all wednesday and friday during 2 month. In this case, I'll need to display this event X times.\u003c/p\u003e\n\n\u003cp\u003eE.g. what should be displayed on the timeline :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eThe flea - today 2015-04-08 - id = 42\nJustin Bieber concert - today 2015-04-08 - id = 43\nAn other concert - thursday 2015-04-09 - id = 44\nThe flea - friday 2015-04-10 - id = 42\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe problem I have is that today, each document in Elasticsearch has the same \u003ccode\u003e_id\u003c/code\u003e than the one in MySQL.\u003c/p\u003e\n\n\u003cp\u003eI know i could stop using \u003ccode\u003e_id\u003c/code\u003e and add a \u003ccode\u003eidEvent\u003c/code\u003e field in the mapping, but this whould change a lot of things in the programm. 
Is there an elegant way to handle this problem ?\u003c/p\u003e\n\n\u003cp\u003eEdit :\nHere is a sample of my mapping :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd I wonder if with something like that I would be able to display several times the event in results, according to its \u003ccode\u003edateStart\u003c/code\u003e :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dates\": {\n \"type\": \"nested\",\n \"properties\": {\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eRegards,\u003c/p\u003e","answer_count":"0","comment_count":"5","creation_date":"2015-04-08 12:26:36.48 UTC","last_activity_date":"2015-04-08 13:04:57.743 UTC","last_edit_date":"2015-04-08 13:04:57.743 UTC","last_editor_display_name":"","last_editor_user_id":"1219184","owner_display_name":"","owner_user_id":"1219184","post_type_id":"1","score":"0","tags":"elasticsearch","view_count":"58"} +{"id":"29514583","title":"FInd a document several times","body":"\u003cp\u003eI have a list of events in a city, and I display for the city all incoming events, sorted by the start date of the event.\u003c/p\u003e\n\n\u003cp\u003eToday I need to add a new feature : some events can be repeated over time, for exemple a flea all wednesday and friday during 2 month. In this case, I'll need to display this event X times.\u003c/p\u003e\n\n\u003cp\u003eE.g. 
what should be displayed on the timeline :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eThe flea - today 2015-04-08 - id = 42\nJustin Bieber concert - today 2015-04-08 - id = 43\nAn other concert - thursday 2015-04-09 - id = 44\nThe flea - friday 2015-04-10 - id = 42\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe problem I have is that today, each document in OpenSearchhas the same \u003ccode\u003e_id\u003c/code\u003e than the one in MySQL.\u003c/p\u003e\n\n\u003cp\u003eI know i could stop using \u003ccode\u003e_id\u003c/code\u003e and add a \u003ccode\u003eidEvent\u003c/code\u003e field in the mapping, but this whould change a lot of things in the programm. Is there an elegant way to handle this problem ?\u003c/p\u003e\n\n\u003cp\u003eEdit :\nHere is a sample of my mapping :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd I wonder if with something like that I would be able to display several times the event in results, according to its \u003ccode\u003edateStart\u003c/code\u003e :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dates\": {\n \"type\": \"nested\",\n \"properties\": {\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eRegards,\u003c/p\u003e","answer_count":"0","comment_count":"5","creation_date":"2015-04-08 12:26:36.48 UTC","last_activity_date":"2015-04-08 13:04:57.743 UTC","last_edit_date":"2015-04-08 13:04:57.743 
UTC","last_editor_display_name":"","last_editor_user_id":"1219184","owner_display_name":"","owner_user_id":"1219184","post_type_id":"1","score":"0","tags":"opensearch","view_count":"58"} {"id":"20516073","title":"Sprintf of MAC address of available networks","body":"\u003cp\u003eI want to sprintf Mac address of some found networks in this area like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e `WiFi connection settings:\n MAC: 00 1E C0 10 3B 19\n SSID: css`\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003emy code is :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003echar buf[32];\nBYTE MAC[64];\nint i;\n\nfor(i=1;i\u0026lt;15;i++)\n{ \n MyScanResults = WFScanList(i);\n sprintf(buf,\"%s\", MyScanResults.ssid);\n sprintf(\u0026amp;MAC[i*2],\"%02x\", MyScanResults.bssid[i]);\n _dbgwrite(\"SSID: \");\n _dbgwrite(buf);\n _dbgwrite(\"\\n\");\n _dbgwrite(\"MAC: \");\n _dbgwrite(MAC);\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand Errors are :\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eC:\\Users\\h\\Desktop\\WiFi test\\taskFlyport.c:22: warning: pointer targets in passing argument 1 of 'sprintf' differ in signedness \u0026lt;\u003c/p\u003e\n \n \u003cp\u003eC:\\Users\\h\\Desktop\\WiFi test\\taskFlyport.c:27: warning: pointer targets in passing argument 1 of '_dbgwrite' differ in signedness\u0026lt;\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eis there anyone to tell me where is my problem?\nthanks,regards\u003c/p\u003e","accepted_answer_id":"20516170","answer_count":"2","comment_count":"0","creation_date":"2013-12-11 10:02:51.133 UTC","last_activity_date":"2013-12-12 11:13:14.083 UTC","last_edit_date":"2013-12-11 10:19:09.09 UTC","last_editor_display_name":"","last_editor_user_id":"1859443","owner_display_name":"","owner_user_id":"2426420","post_type_id":"1","score":"1","tags":"c|printf|mac-address","view_count":"1586"} {"id":"43958478","title":"Images not rendering in Phabricator","body":"\u003cp\u003eI have phabricator installed 
on an EC2 instance. I have configured the application to point our CloudFront domain name. I also set up the s3 region, bucket-name and endpoint. However, I am unable to see the images after uploading through phabricator. In the inspect console, I am seeing a 403 Forbidden error to the path of the file in cloudfront. I am unable to verify if the file was uploaded into my s3 due to the path not being the s3 path.\u003c/p\u003e\n\n\u003cp\u003ePlease advise.\u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2017-05-13 22:14:57.337 UTC","last_activity_date":"2017-05-13 22:14:57.337 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4400697","post_type_id":"1","score":"0","tags":"amazon-s3|amazon-cloudfront|phabricator","view_count":"19"} {"id":"35695775","title":"Dropzone, how to not process queue if errors exist","body":"\u003cp\u003eSo I have a form with Dropzone, plus another textarea, which I want to submit - if I insert an oversize file or too many I get the \"oversize\" error in the preview container, etc. BUT the form continues to process upon button clicking the form submit (due to my listener). How can I only submit if there file size is correct for both files and doesn't exceed max file limit? 
I can't see a Dropzone event for say \"no errors\" to add a click event listener - I think I'm close but semi stuck now, I have the below:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$(function() {\n\nvar minImageWidth = 300, minImageHeight = 300;\n\nDropzone.options.jobApplicationUpload = {\n autoProcessQueue: false,\n addRemoveLinks: true,\n uploadMultiple: true,\n paramName: 'file',\n previewsContainer: '.dropzone-previews',\n acceptedFiles: '.pdf, .doc, .docx',\n maxFiles: 2,\n maxFilesize: 2, // MB \n dictDefaultMessage: '',\n clickable: '.fileinput-button',\n\n accept: function(file, done) { \n\n done();\n },\n\n // The setting up of the dropzone \n init: function() {\n var myDropzone = this; \n\n // First change the button to actually tell Dropzone to process the queue.\n this.element.querySelector(\"button[type=submit]\").addEventListener(\"click\", function(e) {\n\n // Make sure that the form isn't actually being sent.\n if(myDropzone.files.length \u0026gt; 0) {\n\n $('#job-application-container').hide();\n $('#spinner-modal').modal('show');\n $('#spinner-modal p').html('\u0026lt;b\u0026gt;Sending your application,\u0026lt;/b\u0026gt; please wait...\u0026lt;/p\u0026gt;'); \n\n e.preventDefault();\n e.stopPropagation();\n myDropzone.processQueue(); \n }\n\n });\n\n this.on(\"success\", function(files, response) {\n\n\n // Gets triggered when the files have successfully been sent.\n // Redirect user or notify of success.\n\n $('#job-application-container').hide();\n console.log('okay' + response);\n localStorage['success'] = 'test';\n location.reload();\n\n }); \n\n\n\n }\n\n};\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e});\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-02-29 09:21:01.413 UTC","favorite_count":"1","last_activity_date":"2016-03-26 21:46:21.9 UTC","last_edit_date":"2016-02-29 11:39:05.273 
UTC","last_editor_display_name":"","last_editor_user_id":"3820348","owner_display_name":"","owner_user_id":"3820348","post_type_id":"1","score":"2","tags":"javascript|jquery|dropzone.js","view_count":"673"} @@ -2144,7 +2144,7 @@ {"id":"13655960","title":"Eclipse/Maven/Junit : junit throws classnotfound even though the compiled class is in test-classes folder","body":"\u003cp\u003eI recently upgraded my environment from Eclipse Ganymede to Eclipse Juno. My application was using the old maven-eclipse-plugin, so I had to make changes in the .classpath and .project and .settings files so that the m2e plugin in eclipse juno gets all the information correctly. I did this by following this link - \u003ca href=\"http://blog.frankel.ch/migrating-from-m2eclipse-to-m2e\" rel=\"nofollow\"\u003ehttp://blog.frankel.ch/migrating-from-m2eclipse-to-m2e\u003c/a\u003e \u003c/p\u003e\n\n\u003cp\u003eMy application runs perfectly fine using tomcat7 and maven also works fine.\nMy issues started when I tried to run a test as junit test in eclipse. This gives me a ClassNotFoundException. As a side note even if I add my test-classes folder as a classpath variable in eclipse, it still has issues because then it says it cannot find the resources folder. This very same environment worked perfectly fine with the earlier eclipse, maven plugin and classpath configuration. So I don't know what has changed. \u003c/p\u003e\n\n\u003cp\u003eI am sharing with you my project structure and classpath details. Please bear with me as the question is a bit long. 
\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eXXX\n\u003cul\u003e\n\u003cli\u003eDBUnit (similar to Web)\u003c/li\u003e\n\u003cli\u003eOthers (similar to Web)\u003c/li\u003e\n\u003cli\u003eWeb\n\u003cul\u003e\n\u003cli\u003esrc/main/java\u003c/li\u003e\n\u003cli\u003esrc/main/resources\u003c/li\u003e\n\u003cli\u003esrc/test/java\u003c/li\u003e\n\u003cli\u003esrc/test/resources\u003c/li\u003e\n\u003cli\u003etarget/classes\u003c/li\u003e\n\u003cli\u003etarget/test-classes\u003c/li\u003e\n\u003cli\u003e.settings\u003c/li\u003e\n\u003cli\u003e.classpath\u003c/li\u003e\n\u003cli\u003e.project\u003c/li\u003e\n\u003c/ul\u003e\u003c/li\u003e\n\u003cli\u003etarget/classes\u003c/li\u003e\n\u003cli\u003e.settings\u003c/li\u003e\n\u003cli\u003e.classpath\u003c/li\u003e\n\u003cli\u003e.project\u003c/li\u003e\n\u003c/ul\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eThe classpath entry under Web is as follows : \u003c/p\u003e\n\n\u003cpre class=\"lang-xml prettyprint-override\"\u003e\u003ccode\u003e\u0026lt;classpathentry kind=\"src\" output=\"target/classes\" path=\"src/main/java\"/\u0026gt;\n\u0026lt;classpathentry kind=\"src\" output=\"target/test-classes\" path=\"src/test/java\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"**\" kind=\"src\" output=\"target/classes\" path=\"src/main/resources\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"**\" kind=\"src\" output=\"target/test-classes\" path=\"src/test/resources\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" path=\"org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" path=\"org.eclipse.jdt.launching.JRE_CONTAINER\"/\u0026gt;\n\u0026lt;classpathentry kind=\"output\" path=\"target/classes\"/\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd the classpath entry under XXX is as follows : \u003c/p\u003e\n\n\u003cpre class=\"lang-xml prettyprint-override\"\u003e\u003ccode\u003e\u0026lt;classpathentry kind=\"src\" output=\"Others/target/classes\" 
path=\"Others/src/main/java\"/\u0026gt;\n\u0026lt;classpathentry kind=\"src\" path=\"DBUnit/src/main/java\"/\u0026gt;\n\u0026lt;classpathentry kind=\"src\" path=\"Web/src/main/java\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"mock/\" kind=\"src\" output=\"Web/target/test-classes\" path=\"Web/src/test/java\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"**\" kind=\"src\" output=\"Web/target/classes\" path=\"Web/src/main/resources\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" path=\"org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" path=\"org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/annotations-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/el-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/jasper.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/jsp-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/servlet-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"output\" path=\"target/classes\"/\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo when I clean the project eclipse does not place the main java classes under the the module1/target/classes folder and it also does not copy the resources folder under classes either.\u003c/p\u003e\n\n\u003cp\u003eI have searched around quite a bit regarding this problem. \u003c/p\u003e\n\n\u003cp\u003eOne solution also suggested to import the project into eclipse as a Maven project and update configuration. This splits my project into multiple modules and maven/eclipse throws me the exception - \"Path must include project and resource name\". I don't understand this error either.\u003c/p\u003e\n\n\u003cp\u003eAnother one suggested the removal of excluding=\"**\". 
I removed it but that did not help either.\u003c/p\u003e\n\n\u003cp\u003eIs there something wrong with the project structure? Does module1 require classpath and project files?\u003c/p\u003e\n\n\u003cp\u003ePlease help, I'll be really grateful. Thanks.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUpdate 03rd Dec 2012\u003c/strong\u003e \u003c/p\u003e\n\n\u003cp\u003eThis is the exception - \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eClass not found com.iei.gas.service.QuartzTestService\njava.lang.ClassNotFoundException: com.iei.gas.service.QuartzTestService \nat java.net.URLClassLoader$1.run(URLClassLoader.java:366)\nat java.net.URLClassLoader$1.run(URLClassLoader.java:355)\nat java.security.AccessController.doPrivileged(Native Method)\nat java.net.URLClassLoader.findClass(URLClassLoader.java:354)\nat java.lang.ClassLoader.loadClass(ClassLoader.java:423)\nat sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)\nat java.lang.ClassLoader.loadClass(ClassLoader.java:356)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.loadClass(RemoteTestRunner.java:693)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.loadClasses(RemoteTestRunner.java:429)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:452)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:683)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:390)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:197)\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"5","creation_date":"2012-12-01 03:07:15.967 UTC","last_activity_date":"2012-12-03 06:31:36.003 UTC","last_edit_date":"2012-12-03 06:31:36.003 UTC","last_editor_display_name":"","last_editor_user_id":"931293","owner_display_name":"","owner_user_id":"931293","post_type_id":"1","score":"1","tags":"eclipse|maven-2|junit4|m2eclipse|m2e","view_count":"1247"} 
{"id":"15479561","title":"Android App: Convert 3gp to mp3","body":"\u003cp\u003eMy soundfiles should be changed from 3gp to mp3.\u003c/p\u003e\n\n\u003cp\u003eI've tried to do this with ffmpeg:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003effmpeg -i input.3gp -vn -acodec libmp3lame -ab 64k output.mp3\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut the new mp3 file is only 0 KB big.\u003c/p\u003e\n\n\u003cp\u003eCould libmp3lame be the problem? \nIs it even possible to do that in Java?(since I only found c++ examples)\u003c/p\u003e","answer_count":"0","comment_count":"2","creation_date":"2013-03-18 14:42:48.477 UTC","last_activity_date":"2013-03-18 15:02:44.163 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2043332","post_type_id":"1","score":"0","tags":"java|android|eclipse|ffmpeg|mp3","view_count":"689"} {"id":"16144470","title":"Get last_insert_id with singleton pattern","body":"\u003cp\u003eI was wondering about a thing when using the singleton pattern on a database connection class. \u003c/p\u003e\n\n\u003cp\u003eAs I understand it, the singleton pattern prevents the creation of more then 1 object of a given class that uses the pattern. 
\u003c/p\u003e\n\n\u003cp\u003eLets say I need the id from a row I just inserted which I get via the \u003ccode\u003emysqli::$insert_id\u003c/code\u003e.\nWhat if another use of the connection object was used to insert a row at the same time, might that result in a chance of returning a different id then the one expected or is it certain always to return the right id?\u003c/p\u003e\n\n\u003cp\u003eSorry for the newbie question, I have just been wondering whether there were a tiny chance on a multiuser application that getting the id this way might be inconsistent.\u003c/p\u003e\n\n\u003cp\u003eThanks in advance.\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2013-04-22 10:08:43.537 UTC","last_activity_date":"2013-04-22 10:40:13.373 UTC","last_edit_date":"2013-04-22 10:40:13.373 UTC","last_editor_display_name":"","last_editor_user_id":"2269749","owner_display_name":"","owner_user_id":"649717","post_type_id":"1","score":"1","tags":"php|oop|singleton","view_count":"241"} -{"id":"19326117","title":"Elasticsearch Nest, parent/child relationship","body":"\u003cp\u003ecan you help me out to define a parent/child relationship using \u003ccode\u003eNESTclient\u003c/code\u003e for elasticsearch?\nmy code looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_properties\", DateDetection = true,.....)]\npublic class Properties{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003chr\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_sales\", DateDetection = true, , ParentType = \"type_properties\")]\npublic class SalesHistory{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI defined the parentType, but I don't see this sales documents related to a parent property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_index\": \"testparentchild\",\n \"_type\": \"type_sales\",\n \"_id\": \"dVd1tUJ0SNyoiSer7sNA\",\n \"_version\": 1,\n \"_score\": 1,\n \"_source\": {\n \"salesRecId\": 179504762,\n 
\"salesPrice\": 150000,\n \"salesDate\": \"2003-04-07T00:00:00\",\n }\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"19360482","answer_count":"1","comment_count":"0","creation_date":"2013-10-11 19:50:34.827 UTC","last_activity_date":"2016-12-09 09:58:28.047 UTC","last_edit_date":"2016-12-09 09:58:28.047 UTC","last_editor_display_name":"","last_editor_user_id":"6340959","owner_display_name":"","owner_user_id":"2824011","post_type_id":"1","score":"0","tags":"elasticsearch|nest","view_count":"1534"} +{"id":"19326117","title":"OpenSearchNest, parent/child relationship","body":"\u003cp\u003ecan you help me out to define a parent/child relationship using \u003ccode\u003eNESTclient\u003c/code\u003e for opensearch?\nmy code looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_properties\", DateDetection = true,.....)]\npublic class Properties{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003chr\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_sales\", DateDetection = true, , ParentType = \"type_properties\")]\npublic class SalesHistory{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI defined the parentType, but I don't see this sales documents related to a parent property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_index\": \"testparentchild\",\n \"_type\": \"type_sales\",\n \"_id\": \"dVd1tUJ0SNyoiSer7sNA\",\n \"_version\": 1,\n \"_score\": 1,\n \"_source\": {\n \"salesRecId\": 179504762,\n \"salesPrice\": 150000,\n \"salesDate\": \"2003-04-07T00:00:00\",\n }\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"19360482","answer_count":"1","comment_count":"0","creation_date":"2013-10-11 19:50:34.827 UTC","last_activity_date":"2016-12-09 09:58:28.047 UTC","last_edit_date":"2016-12-09 09:58:28.047 
UTC","last_editor_display_name":"","last_editor_user_id":"6340959","owner_display_name":"","owner_user_id":"2824011","post_type_id":"1","score":"0","tags":"opensearch|nest","view_count":"1534"} {"id":"46334908","title":"Loading failed javacript file when deploy web application on weblogic","body":"\u003cp\u003ei have a problem with load javacript file on jsp page when deploy my web application on weblogic server. Before I deploy it on Tomcat 7 and it work normally.\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eFirst I see on console window of firefox. My jsp page couldn't load js file on \u003cstrong\u003e\u003cem\u003e/resources/\u003c/em\u003e\u003c/strong\u003e folder (this folder is the same level with \u003cstrong\u003e\u003cem\u003e/WEB-INF/\u003c/em\u003e\u003c/strong\u003e):\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eLoading failed for the \u003ccode\u003e\u0026lt;script\u0026gt;\u003c/code\u003e with source “http ://10.3.11.25:7001/resources/assets/global/plugins/jquery.min.js”. 10.3.11.25:7001:104\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eImage i have capture:\n\u003ca href=\"https://i.stack.imgur.com/TJSLf.png\" rel=\"nofollow noreferrer\"\u003eConsole log of browser\u003c/a\u003e\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eI try copy the url: “http ://10.3.11.25:7001/resources/assets/global/plugins/jquery.min.js” to address bar. \u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003e=\u003e I can access it, but i only can download the js file (It not display the source code on browser as normally).\u003c/p\u003e\n\n\u003cp\u003e=\u003e What is my problem. 
I deploy my web application on weblogic 12c\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUPDATE:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eNetwork tab load js file ok, all status is 200: \n\u003ca href=\"https://i.stack.imgur.com/E9qjw.png\" rel=\"nofollow noreferrer\"\u003eCapture image\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003eSource code include on jsp:\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003e\u003ccode\u003e\u0026lt;script src=\"resources/assets/global/plugins/jquery.min.js\"\n type=\"text/javascript\"\u0026gt;\u0026lt;/script\u0026gt;\n\u0026lt;script src=\"resources/assets/global/plugins/jquery-migrate.min.js\"\n type=\"text/javascript\"\u0026gt;\u0026lt;/script\u0026gt;\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUPDATE 2:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eAll status is 200 but load O KB and response is notthing\u003c/li\u003e\n\u003cli\u003eWhen i copy the js url to address bar it show popup download it (not display the source code as normally)\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003ePS: Sorry i can post more than 2 picture.\u003c/p\u003e","accepted_answer_id":"46356494","answer_count":"1","comment_count":"10","creation_date":"2017-09-21 03:17:09.437 UTC","last_activity_date":"2017-09-22 03:44:41.263 UTC","last_edit_date":"2017-09-21 03:40:25.14 UTC","last_editor_display_name":"","last_editor_user_id":"8645550","owner_display_name":"","owner_user_id":"8645550","post_type_id":"1","score":"0","tags":"javascript|deployment|weblogic12c","view_count":"70"} {"id":"16250969","title":"How to render Backbone el correctly into the view page","body":"\u003cp\u003eI'm trying to working correctly with my first \u003ccode\u003eBackbone\u003c/code\u003e app and trying to render it into my page.\u003c/p\u003e\n\n\u003cp\u003eI've wrote this app but I didn't got how I shoud put the app html rendered in the html 
view:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;script type=\"text/javascript\"\u0026gt;\n$(function(){\n var SearchApp = new Search.Views.App({\n id:\"product-name-results\"\n });\n SearchApp.render();\n});\n\u0026lt;script\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis is my app\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar Search = {\n Models: {},\n Collections: {},\n Views: {},\n Templates:{}\n}\n\nSearch.Views.App = Backbone.View.extend({\n initialize:function () {\n console.log('Search.Views.App initialize')\n }, \n render:function (options) {\n this.$el.html('hello world');\n }\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eObviously this render method not appending in the \u003ccode\u003ehtml view\u003c/code\u003e, but how to append it into the view?\u003c/p\u003e","accepted_answer_id":"16251013","answer_count":"1","comment_count":"0","creation_date":"2013-04-27 10:27:02.6 UTC","last_activity_date":"2013-04-27 10:40:12.98 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"185921","post_type_id":"1","score":"0","tags":"backbone.js|render|el","view_count":"89"} {"id":"4836723","title":"Trouble with inserting notes into a JTree","body":"\u003cp\u003eI have a \u003ccode\u003eJTree\u003c/code\u003e which is constructed with the following method:\u003cbr\u003e\n(The \u003ccode\u003eBKMNode\u003c/code\u003e class extends \u003ccode\u003eDefaultMutableTreeNode\u003c/code\u003e, and the\u003ccode\u003eDataNode\u003c/code\u003e simply holds the data) \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e void populateTree(BKMNode parent) {\n for (DataNode node : nodes) {\n BKMNode treeNode = new BKMNode(node.name,node.fullName,null);\n // check if this node was already added before\n if (! 
existingNodes.contains(ip + \".\" + node.fullName)) {\n existingNodes.add(ip + \".\" + node.fullName);\n DefaultTreeModel model = (DefaultTreeModel)tree.getModel();\n model.insertNodeInto(treeNode, parent, parent.getChildCount());\n System.out.println(\"adding \" + ip + \".\" + node.fullName);\n }\n node.populateTree(treeNode);\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003ccode\u003e// some more non-relevant code...\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003eWhen the tree is created at the application startup, everything is fine.\u003cbr\u003e\nBut once in a while my application adds nodes to the tree using the same method.\u003cbr\u003e\nWhen the application attempts to add a new node to the tree in does print the text, but nothing changes on the \u003ccode\u003eGUI\u003c/code\u003e.\u003cbr\u003e\nI tried calling \u003ccode\u003eJTree.invalidate()\u003c/code\u003e, \u003ccode\u003evalidate()\u003c/code\u003e, \u003ccode\u003erepaint()\u003c/code\u003e, \u003ccode\u003ereload()\u003c/code\u003e but nothing seems to help. \u003c/p\u003e\n\n\u003cp\u003eThe \u003ccode\u003epopulateTree\u003c/code\u003e method is always called from the \u003ccode\u003eEDT\u003c/code\u003e. \u003c/p\u003e\n\n\u003cp\u003eDoes anyone know what's the problems here? \u003c/p\u003e\n\n\u003cp\u003eThanks a lot in advance! \u003c/p\u003e","answer_count":"2","comment_count":"8","creation_date":"2011-01-29 11:59:40.373 UTC","favorite_count":"0","last_activity_date":"2014-06-25 11:28:14.557 UTC","last_edit_date":"2014-06-25 11:28:14.557 UTC","last_editor_display_name":"","last_editor_user_id":"3485434","owner_display_name":"","owner_user_id":"594926","post_type_id":"1","score":"0","tags":"java|swing|jtree","view_count":"1088"} @@ -2367,7 +2367,7 @@ {"id":"5328892","title":"Value of submit button clicked","body":"\u003cp\u003eThis should be really straight forward.\u003c/p\u003e\n\n\u003cp\u003eI'm checking if a form is being submitted using jquery. 
The form has multiple submit buttons with various values:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;button type=\"submit\" value=\"foo\"\u0026gt;Foo\u0026lt;/button\u0026gt;\n\u0026lt;button type=\"submit\" value=\"bar\"\u0026gt;Bar\u0026lt;/button\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI would like to find the value of the button that just submitted the form:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$(form).live('submit', function() {\n // Get value of submit button\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThanks\u003c/p\u003e","accepted_answer_id":"5329081","answer_count":"6","comment_count":"0","creation_date":"2011-03-16 17:02:22.92 UTC","favorite_count":"4","last_activity_date":"2013-09-26 10:17:38.923 UTC","last_editor_display_name":"","owner_display_name":"user623520","post_type_id":"1","score":"12","tags":"jquery","view_count":"29496"} {"id":"39505455","title":"Strongname Signing IKVM PDFBox for Word Addin","body":"\u003cp\u003efor a Word Addin I am using PDFBox to manipulate PDFs. Or rather I would like to use it. I used it before with a self-created Desktop App. PDFBox is a Java Library that can be made usable as DLLs with IKVM (like here: \u003cstrong\u003e\u003ca href=\"http://www.squarepdf.net/pdfbox-in-net\" rel=\"nofollow\"\u003ehttp://www.squarepdf.net/pdfbox-in-net\u003c/a\u003e\u003c/strong\u003e ). The problem that I experience is that all libraries for Wordaddins must be strongnamed. 
I tried to strongname sign but then I get an exception that a part of apache common logging (in directory MANIFEST.MF) cannot be found.\nI know this is pretty vague so far and I will post more details later on, but maybe someone already went through this and has an idea on how to do this right or can point me to some place where there is already a strongnamed version of PDFBox.\nThanks in advance!\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-09-15 07:37:22.127 UTC","last_activity_date":"2016-09-19 12:50:35.217 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1106000","post_type_id":"1","score":"0","tags":"pdfbox|strongname|ikvm|word-addins","view_count":"37"} {"id":"13613664","title":"C: Error at accessing dynamically allocated elements","body":"\u003cp\u003eI am trying to write a function that searches a certain element. However, it exists with error when I try to access an element. I commented the rows that generate the error in \u003ccode\u003esearch\u003c/code\u003e function.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e#include \u0026lt;stdlib.h\u0026gt;\n#include \u0026lt;stdio.h\u0026gt;\n#define m 2\n\ntypedef struct pag {\n int nr;\n int key[m];\n struct pag * child[m + 1];\n}* page;\n\npage init(page B) {\n int i;\n B = malloc(sizeof(struct pag));\n\n for (i = 0; i \u0026lt; m; i++) {\n B-\u0026gt;key[i] = 0;\n B-\u0026gt;child[i] = malloc(sizeof(struct pag));\n }\n B-\u0026gt;child[i] = malloc(sizeof(struct pag));\n return B;\n}\n\npage search(page B, int k) {\n int i;\n if (B == NULL )\n return B;\n // 1. cautare liniara\n for (i = 0; i \u0026lt; m; i++) {\n // 2. find the desired value\n if (B-\u0026gt;key[i] == k) {\n return B;\n // 3. 
find the value greater or equal, take the left road to the child\n } else if (B-\u0026gt;key[i] \u0026gt;= k) {\n return search(B-\u0026gt;child[i], k); //exists with error here\n }\n }\n\n return search(B-\u0026gt;child[i], k); //also exists with error here\n}\n\nint main() {\n page B = init(B);\n\n if (search(B, 2) == NULL )\n printf(\"Negasit\");\n else\n printf(\"Gasit\");\n\n return 0;\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"5","creation_date":"2012-11-28 20:18:04.017 UTC","last_activity_date":"2012-11-28 20:31:19.62 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"898390","post_type_id":"1","score":"-6","tags":"c","view_count":"31"} -{"id":"16045165","title":"Sum Query in Elasticsearch","body":"\u003cp\u003eI'm fairly new to Elasticsearch. I'm trying to write a query that will group by a field and calculate a sum. In SQL, my query would look like this:\n\u003ccode\u003eSELECT lane, SUM(routes) FROM lanes GROUP BY lane\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003eI have this data that looks like this in ES:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"TUeWFEhnS9q1Ukb2QdZABg\",\n \"_score\": 1.0,\n \"_source\": {\n \"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M05\",\n \"routes\": 4047\n }\n},\n{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"owVmGW9GT562_2Alfru2DA\",\n \"_score\": 1.0,\n \"_source\": {\n \"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M03\",\n \"routes\": 4065\n }\n},\n{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"JY9xNDxqSsajw76oMC2gxA\",\n \"_score\": 1.0,\n \"_source\": {\n \"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M05\",\n \"routes\": 3056\n }\n},\n{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"owVmGW9GT345_2Alfru2DB\",\n \"_score\": 1.0,\n \"_source\": {\n 
\"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M03\",\n \"routes\": 5675\n }\n},\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI want to essentially run the same query in ES as I did in SQL, so that my result would be something like (in json of course): \u003ccode\u003eM05: 7103, M03: 9740\u003c/code\u003e\u003c/p\u003e","accepted_answer_id":"16048098","answer_count":"1","comment_count":"0","creation_date":"2013-04-16 19:11:33.503 UTC","last_activity_date":"2016-12-07 16:15:18.933 UTC","last_edit_date":"2016-12-07 16:15:18.933 UTC","last_editor_display_name":"","last_editor_user_id":"6340959","owner_display_name":"","owner_user_id":"2242934","post_type_id":"1","score":"3","tags":"nosql|elasticsearch","view_count":"2760"} +{"id":"16045165","title":"Sum Query in OpenSearch","body":"\u003cp\u003eI'm fairly new to OpenSearch. I'm trying to write a query that will group by a field and calculate a sum. In SQL, my query would look like this:\n\u003ccode\u003eSELECT lane, SUM(routes) FROM lanes GROUP BY lane\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003eI have this data that looks like this in opensearch:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"TUeWFEhnS9q1Ukb2QdZABg\",\n \"_score\": 1.0,\n \"_source\": {\n \"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M05\",\n \"routes\": 4047\n }\n},\n{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"owVmGW9GT562_2Alfru2DA\",\n \"_score\": 1.0,\n \"_source\": {\n \"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M03\",\n \"routes\": 4065\n }\n},\n{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"JY9xNDxqSsajw76oMC2gxA\",\n \"_score\": 1.0,\n \"_source\": {\n \"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M05\",\n \"routes\": 3056\n }\n},\n{\n \"_index\": \"kpi\",\n \"_type\": \"mroutes_by_lane\",\n \"_id\": \"owVmGW9GT345_2Alfru2DB\",\n 
\"_score\": 1.0,\n \"_source\": {\n \"warehouse_id\": 107,\n \"date\": \"2013-04-08\",\n \"lane\": \"M03\",\n \"routes\": 5675\n }\n},\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI want to essentially run the same query in opensearch as I did in SQL, so that my result would be something like (in json of course): \u003ccode\u003eM05: 7103, M03: 9740\u003c/code\u003e\u003c/p\u003e","accepted_answer_id":"16048098","answer_count":"1","comment_count":"0","creation_date":"2013-04-16 19:11:33.503 UTC","last_activity_date":"2016-12-07 16:15:18.933 UTC","last_edit_date":"2016-12-07 16:15:18.933 UTC","last_editor_display_name":"","last_editor_user_id":"6340959","owner_display_name":"","owner_user_id":"2242934","post_type_id":"1","score":"3","tags":"nosql|opensearch","view_count":"2760"} {"id":"582287","title":"nunit setup/teardown not working?","body":"\u003cp\u003eOk, I've got a strange problem. I am testing a usercontrol and have code like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[TestFixture]\npublic myTestClass : UserControl\n{\n MyControl m_Control;\n\n [Test]\n public void TestMyControl()\n {\n m_Control = new MyControl();\n this.Controls.Add(m_Control);\n\n Assert.That(/*SomethingOrOther*/)\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis works fine, but when I change it to:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[TestFixture]\npublic myTestClass : UserControl\n{\n MyControl m_Control;\n\n [Setup]\n public void Setup()\n {\n m_Control = new MyControl();\n this.Controls.Add(m_Control);\n }\n\n [TearDown]\n public void TearDown()\n {\n this.Controls.Clear();\n }\n\n [Test]\n public void TestMyControl()\n {\n Assert.That(/*SomethingOrOther*/);\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI get an Object Reference Not Set To An Instance of an Object. I even output to the console to ensure that the setup/teardown were running at the correct times, and they were... 
but still it isn't newing up the usercontrols.\u003c/p\u003e\n\n\u003cp\u003eedit\u003e The exact code is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[TestFixture]\npublic class MoneyBoxTests : UserControl\n{\n private MoneyBox m_MoneyBox;\n private TextBox m_TextBox;\n\n #region \"Setup/TearDown\"\n [SetUp]\n public void Setup()\n {\n MoneyBox m_MoneyBox = new MoneyBox();\n TextBox m_TextBox = new TextBox();\n\n this.Controls.Add(m_MoneyBox);\n this.Controls.Add(m_TextBox);\n }\n\n [TearDown]\n public void TearDown()\n {\n this.Controls.Clear();\n }\n #endregion\n\n [Test]\n public void AmountConvertsToDollarsOnLeave()\n {\n m_MoneyBox.Focus();\n m_MoneyBox.Text = \"100\";\n m_TextBox.Focus();\n\n Assert.That(m_MoneyBox.Text, Is.EqualTo(\"$100.00\"), \"Text isn't $100.00\");\n }\n\n [Test]\n public void AmountStaysANumberAfterConvertToDollars()\n {\n m_MoneyBox.Focus();\n m_MoneyBox.Text = \"100\";\n m_TextBox.Focus();\n\n Assert.That(m_MoneyBox.Amount, Is.EqualTo(100), \"Amount isn't 100\");\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI get the exception(s) at the respective m_MoneyBox.Focus() calls.\u003c/p\u003e\n\n\u003cp\u003eSolved - See Joseph's comments\u003c/p\u003e","accepted_answer_id":"582364","answer_count":"3","comment_count":"0","creation_date":"2009-02-24 15:52:09.36 UTC","last_activity_date":"2013-01-20 17:07:44.553 UTC","last_edit_date":"2009-02-24 16:50:48.627 UTC","last_editor_display_name":"SnOrfus","last_editor_user_id":"48553","owner_display_name":"SnOrfus","owner_user_id":"48553","post_type_id":"1","score":"2","tags":"c#|unit-testing","view_count":"4518"} {"id":"24733176","title":"Error: can't find any special indices 2d (needs index) 2dsphere (needs index)","body":"\u003cp\u003eWhen i use this find command on heroku cloud server: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eusers.find({$and: [\n{loc: { $nearSphere: user.loc.coordinates,\n $maxDistance: user.dis / 3959}},\n{age: {$gte: user.age_down}},\n{age: {$lte: 
user.age_up}},\n{gender: user.interested},\n{interested: user.gender}]})\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei get the \u003ccode\u003ecan't find any special indices 2d (needs index) 2dsphere (needs index)\u003c/code\u003e Error.\u003c/p\u003e\n\n\u003cp\u003ethe results (in heroku) of the db.users.getIndexes() are:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ePRIMARY\u0026gt; db.getIndexes()\n[\n {\n \"v\" : 1,\n \"key\" : {\n \"_id\" : 1\n },\n \"ns\" : \"app27206755.users\",\n \"name\" : \"_id_\"\n },\n {\n \"v\" : 1,\n \"key\" : {\n \"loc\" : \"2dsphere\"\n },\n \"ns\" : \"app27206755.users\",\n \"name\" : \"loc_2dsphere\",\n \"background\" : true\n }\n]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe weird thing is that in my local server it works perfectly (the same code exactly) but the problem is only on heroku.. i think that maybe i have a newer version of mongo on my local machin. the problem is that i dont know how to update the version of mongodb on the heroku cloud server, or if its even possible?\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003eUpdate\u003c/em\u003e\u003c/strong\u003e: i just found that the version of mongo in heroku is 2.4.8 and in my local machine is 2.6.3.\nany idea how to upgrade ?\u003c/p\u003e\n\n\u003cp\u003ethanks !\u003c/p\u003e","answer_count":"0","comment_count":"5","creation_date":"2014-07-14 09:16:50.527 UTC","last_activity_date":"2014-07-14 11:46:20.857 UTC","last_edit_date":"2014-07-14 11:46:20.857 UTC","last_editor_display_name":"","last_editor_user_id":"3545212","owner_display_name":"","owner_user_id":"3545212","post_type_id":"1","score":"0","tags":"mongodb|heroku","view_count":"492"} {"id":"14936304","title":"using AND condition MYsql","body":"\u003cp\u003eWhat I need is: when I give training_id 172 AND training_id 174 it have to return user 150 only\u003cbr\u003e\n\u003cbr\u003e\n\u003cimg src=\"https://i.stack.imgur.com/bvgnH.png\" alt=\"enter image description 
here\"\u003e\u003c/p\u003e\n\n\u003cp\u003eI tried this but it doen't work\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eSELECT user_id FROM Training_users WHERE training_id = 172 AND training_id = 174\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMost of the times training_id might be more than 2 \u003c/p\u003e","accepted_answer_id":"14936406","answer_count":"5","comment_count":"7","creation_date":"2013-02-18 12:27:28.553 UTC","last_activity_date":"2013-02-18 13:05:03.833 UTC","last_edit_date":"2013-02-18 12:57:51.307 UTC","last_editor_display_name":"","last_editor_user_id":"1626398","owner_display_name":"","owner_user_id":"1626398","post_type_id":"1","score":"0","tags":"php|mysql|cakephp|cakephp-1.3","view_count":"95"} @@ -2531,7 +2531,7 @@ {"id":"43847208","title":"PHP Memory Dump","body":"\u003cp\u003eI have a long execute PHP script that executes for months , In last version I have a memory leakage and I dont know where is problem , As I have many changes in last version I cannot manually check changes\u003c/p\u003e\n\n\u003cp\u003eNotice that is is long execute PHP script after 10 days , I see that the script consume about 5GB RAM (the previous version just consumed 280MB RAM ) , I want to dump memory after days to see what remains in RAM forever , How can I do this?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-05-08 12:04:25.9 UTC","last_activity_date":"2017-05-08 12:14:13.01 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"6447123","post_type_id":"1","score":"1","tags":"php|memory-leaks","view_count":"140"} {"id":"40613363","title":"Cannot convert source type, an explicit conversion exists","body":"\u003cp\u003eI have written a simple excample to clearify my Problem. 
This is the Code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003einternal interface IAnimal\n{\n string Name { get; set; }\n}\n\nclass Animal : IAnimal\n{\n public string Name { get; set; }\n}\n\nclass Cow : Animal {}\nclass Sheep : Animal {}\n\n\ninternal interface IFarm\u0026lt;T\u0026gt; where T : IAnimal\n{\n T TheAnimal { get; }\n}\n\nclass Farm\u0026lt;T\u0026gt; : IFarm\u0026lt;T\u0026gt; where T : IAnimal\n{\n public T TheAnimal { get; }\n}\n\n\nclass CattleFarm : Farm\u0026lt;Cow\u0026gt; {}\nclass SheepFarm : Farm\u0026lt;Sheep\u0026gt; {}\n\n\nclass Demo\n{\n private IFarm\u0026lt;IAnimal\u0026gt; AnyFarm;\n void Foo()\n {\n AnyFarm = new CattleFarm();\n var name = AnyFarm.TheAnimal.Name;\n AnyFarm = new SheepFarm();\n name = AnyFarm.TheAnimal.Name;\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhy do I get the compiler error when trying to assign CattleFarm or SheepFarm to AnyFarm?\u003c/p\u003e\n\n\u003cp\u003eWhere is my fault?\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2016-11-15 15:13:43.767 UTC","last_activity_date":"2016-11-15 15:13:43.767 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2037761","post_type_id":"1","score":"0","tags":"c#|syntax-error","view_count":"26"} {"id":"45720359","title":"How to parse through JSON data from URL in Swift 3","body":"\u003cp\u003eI am trying to loop through a JSON dictionary which is retrieved from POSTing to a URL. 
After retrieving the data and serializing it as a JSON object, I am unable to access the individual parts of the data.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunc retrieveLotStatus(lotNumber: String) {\n let allowedChars = (CharacterSet(charactersIn: \"!*'();:@\u0026amp;=+$,/?%#-[] \").inverted)\n\n var url = URLRequest(url: URL(string: \"fakeURLHere\")!)\n let BodyData = \"lotNum=\" + lotNumber + \"\u0026amp;field=\" + PHASE.addingPercentEncoding(withAllowedCharacters: allowedChars)!\n url.httpMethod = \"POST\"\n url.httpBody = BodyData.data(using: .utf8)\n let task = URLSession.shared.dataTask(with: url) { data, response, error in\n guard let data = data, error == nil else {\n print(error?.localizedDescription ?? \"No Data\")\n return\n }\n let responseJSON = try? JSONSerialization.jsonObject(with: data, options: [])\n print(responseJSON)\n if let jsonData = responseJSON as? [String: Any] {\n if let ItemNumber = jsonData[\"ItemNumber\"] as? [String: Any] {\n print(ItemNumber)\n }\n }\n }\n task.resume()\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis is an example of what JSON data is being retrieved when printing the response for reference\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e Optional(\u0026lt;__NSArrayI 0x17d08c90\u0026gt;(\n{\n Count = 4;\n ItemNumber = 1;\n PercentComplete = \"100.00\";\n Total = 4;\n},\n{\n Count = 1;\n ItemNumber = 10;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 10a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 11;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 11a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 4;\n ItemNumber = 1a;\n PercentComplete = \"100.00\";\n Total = 4;\n},\n{\n Count = 2;\n ItemNumber = 2;\n PercentComplete = \"100.00\";\n Total = 2;\n},\n{\n Count = 2;\n ItemNumber = 2a;\n PercentComplete = \"100.00\";\n Total = 2;\n},\n{\n Count = 1;\n ItemNumber = 3;\n 
PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 3a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 4;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 4a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 5;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 5a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 6;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 6a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 7;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 7a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 8;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 8a;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 9;\n PercentComplete = \"100.00\";\n Total = 1;\n},\n{\n Count = 1;\n ItemNumber = 9a;\n PercentComplete = \"100.00\";\n Total = 1;\n}\n)\n)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI am trying to loop through each object and append the value to a table. 
However before I can get to formatting them as UITableCell, I am trying to simply read each responseJSON[\"ItemNumber\"], responseJSON[\"Count\"], responseJSON[\"Total\"] and responseJSON[\"PercentComplete\"]\u003c/p\u003e","accepted_answer_id":"45720724","answer_count":"1","comment_count":"1","creation_date":"2017-08-16 18:10:05.897 UTC","last_activity_date":"2017-08-16 18:34:48.133 UTC","last_edit_date":"2017-08-16 18:12:00.143 UTC","last_editor_display_name":"","last_editor_user_id":"6296515","owner_display_name":"","owner_user_id":"6296515","post_type_id":"1","score":"0","tags":"ios|json|swift","view_count":"57"} -{"id":"15190113","title":"OpenGL blending mode and depth buffer","body":"\u003cp\u003eThe read circle is nearer to the viewer and the texture has a transparent background.\u003c/p\u003e\n\n\u003cp\u003e(Both objects are squares with the same size, just different texture and x, z coords).\u003c/p\u003e\n\n\u003cp\u003eI want:\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/w4uIT.png\" alt=\"enter image description here\"\u003e\u003c/p\u003e\n\n\u003cp\u003eBut I have:\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/TfsRC.png\" alt=\"enter image description here\"\u003e\u003c/p\u003e\n\n\u003cp\u003eI know I have to do something with blending modes and maybe the depth buffer, but I don't know exactly what. 
Can someone help me?\u003c/p\u003e\n\n\u003cp\u003eThe current code to load the texture:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic void initTexture(GL10 gl, Bitmap bitmap) {\n gl.glEnable(GL10.GL_BLEND);\n gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);\n\n int[] texture = new int[1];\n gl.glGenTextures(1, texture, 0);\n\n textureId = texture[0];\n gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);\n\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);\n\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);\n\n GLUtils.texImage2D(GLES10.GL_TEXTURE_2D, 0, bitmap, 0);\n bitmap.recycle();\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe relevant part of drawing, for each of these objects:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);\n\n gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);\n gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);\n\n gl.glFrontFace(GL10.GL_CW);\n\n gl.glVertexPointer(3, GL10.GL_FLOAT, 0, verticesBuffer);\n gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);\n\n gl.glDrawElements(GLES20.GL_TRIANGLES, indices.length, GLES10.GL_UNSIGNED_SHORT, indicesBuffer);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThanks!\u003c/p\u003e\n\n\u003cp\u003eNote: I'm using OpenGL ES 1.\u003c/p\u003e","answer_count":"3","comment_count":"0","creation_date":"2013-03-03 19:54:18.073 UTC","last_activity_date":"2013-03-04 15:19:02.307 UTC","last_edit_date":"2013-03-04 15:17:57.58 UTC","last_editor_display_name":"","last_editor_user_id":"752976","owner_display_name":"","owner_user_id":"930450","post_type_id":"1","score":"3","tags":"android|opengl-es","view_count":"865"} +{"id":"15190113","title":"OpenGL blending mode and depth 
buffer","body":"\u003cp\u003eThe read circle is nearer to the viewer and the texture has a transparent background.\u003c/p\u003e\n\n\u003cp\u003e(Both objects are squares with the same size, just different texture and x, z coords).\u003c/p\u003e\n\n\u003cp\u003eI want:\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/w4uIT.png\" alt=\"enter image description here\"\u003e\u003c/p\u003e\n\n\u003cp\u003eBut I have:\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/TfsRC.png\" alt=\"enter image description here\"\u003e\u003c/p\u003e\n\n\u003cp\u003eI know I have to do something with blending modes and maybe the depth buffer, but I don't know exactly what. Can someone help me?\u003c/p\u003e\n\n\u003cp\u003eThe current code to load the texture:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic void initTexture(GL10 gl, Bitmap bitmap) {\n gl.glEnable(GL10.GL_BLEND);\n gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);\n\n int[] texture = new int[1];\n gl.glGenTextures(1, texture, 0);\n\n textureId = texture[0];\n gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);\n\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);\n\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);\n gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);\n\n GLUtils.texImage2D(GLES10.GL_TEXTURE_2D, 0, bitmap, 0);\n bitmap.recycle();\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe relevant part of drawing, for each of these objects:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);\n\n gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);\n gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);\n\n gl.glFrontFace(GL10.GL_CW);\n\n gl.glVertexPointer(3, GL10.GL_FLOAT, 0, verticesBuffer);\n gl.glTexCoordPointer(2, 
GL10.GL_FLOAT, 0, textureBuffer);\n\n gl.glDrawElements(GLES20.GL_TRIANGLES, indices.length, GLES10.GL_UNSIGNED_SHORT, indicesBuffer);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThanks!\u003c/p\u003e\n\n\u003cp\u003eNote: I'm using OpenGL opensearch 1.\u003c/p\u003e","answer_count":"3","comment_count":"0","creation_date":"2013-03-03 19:54:18.073 UTC","last_activity_date":"2013-03-04 15:19:02.307 UTC","last_edit_date":"2013-03-04 15:17:57.58 UTC","last_editor_display_name":"","last_editor_user_id":"752976","owner_display_name":"","owner_user_id":"930450","post_type_id":"1","score":"3","tags":"android|opengl-opensearch","view_count":"865"} {"id":"33992468","title":"Magento API with iOS application and Oauth 1.0 Login","body":"\u003cp\u003eI want to use \u003ccode\u003emagento API\u003c/code\u003e in my application to get product list, add to \ncart and purchase it from application etc. \u003c/p\u003e\n\n\u003cp\u003eI am very new to magento, I want to know that do I need to implement \u003ccode\u003eoauth 1.0\u003c/code\u003e login to use API from my iOS application?\u003c/p\u003e\n\n\u003cp\u003eAs per this link, I am assuming that I need to implement oauth 1.0 \u003ca href=\"http://devdocs.magento.com/guides/m1x/api/rest/introduction.html\" rel=\"nofollow\"\u003eMagento API\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eI have tried with this sample code \u003ca href=\"https://www.cocoacontrols.com/controls/simple-oauth-1-0a-client\" rel=\"nofollow\"\u003eOauth 1.0\u003c/a\u003e, I am able to get \u003ccode\u003eoauth_token\u003c/code\u003e and \u003ccode\u003eoauth_verifier\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eBut in last step I want \u003ccode\u003eoauth_token\u003c/code\u003e and \u003ccode\u003eoauth_token_secret\u003c/code\u003e which I am not getting and instead of that I am getting html page in response. 
Not sure where I am wrong.\u003c/p\u003e\n\n\u003cp\u003eCan you please guide me am I going on right track to use magento API\nAnd to use \u003cstrong\u003eOauth 1.0\u003c/strong\u003e?\u003c/p\u003e\n\n\u003cp\u003eThanks.\u003c/p\u003e","answer_count":"0","comment_count":"1","creation_date":"2015-11-30 06:15:19.333 UTC","last_activity_date":"2015-11-30 07:47:04.07 UTC","last_edit_date":"2015-11-30 07:47:04.07 UTC","last_editor_display_name":"","last_editor_user_id":"3161009","owner_display_name":"","owner_user_id":"3161009","post_type_id":"1","score":"4","tags":"ios|objective-c|api|magento|oauth","view_count":"172"} {"id":"11685235","title":"Login using Python in basic HTML form","body":"\u003cblockquote\u003e\n \u003cp\u003e\u003cstrong\u003ePossible Duplicate:\u003c/strong\u003e\u003cbr\u003e\n \u003ca href=\"https://stackoverflow.com/questions/663490/python-how-do-you-login-to-a-page-and-view-the-resulting-page-in-a-browser\"\u003ePython: How do you login to a page and view the resulting page in a browser?\u003c/a\u003e \u003c/p\u003e\n\u003c/blockquote\u003e\n\n\n\n\u003cp\u003eI wanted to know how I can perform login's on pages like \u003ca href=\"http://www.deshabhimani.com/signin.php\" rel=\"nofollow noreferrer\"\u003ehttp://www.deshabhimani.com/signin.php\u003c/a\u003e which has a php-based login prompt using python. This form is used to login to \u003ca href=\"http://www.deshabhimani.com/epaper.php\" rel=\"nofollow noreferrer\"\u003ehttp://www.deshabhimani.com/epaper.php\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eThe site does not provide a HTTP API.\u003c/p\u003e\n\n\u003cp\u003eI want to later use python to download all the pages of the epaper(which are individual) and then make it into a final one file pdf. 
\u003c/p\u003e\n\n\u003cp\u003eThe file which I want to download is \u003ca href=\"http://www.deshabhimani.com/epaper.php?page=43210\u0026amp;ddate=27-07-2012\u0026amp;edition=Kochi\" rel=\"nofollow noreferrer\"\u003ehttp://www.deshabhimani.com/epaper.php?page=43210\u0026amp;ddate=27-07-2012\u0026amp;edition=Kochi\u003c/a\u003e which is only accessible by logging in\u003c/p\u003e","accepted_answer_id":"11685523","answer_count":"1","comment_count":"2","creation_date":"2012-07-27 09:41:45.053 UTC","last_activity_date":"2012-07-27 10:05:57.943 UTC","last_edit_date":"2017-05-23 12:03:16.507 UTC","last_editor_display_name":"","last_editor_user_id":"-1","owner_display_name":"","owner_user_id":"835277","post_type_id":"1","score":"3","tags":"python|login","view_count":"11857"} {"id":"7580894","title":"XmlTextWriter and null strings","body":"\u003cp\u003eI want to serialize an object where some of the member variables are of type \u003ccode\u003estring\u003c/code\u003e and have the value \u003ccode\u003enull\u003c/code\u003e.\nI use the following code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e var writer = new XmlTextWriter(stream, Encoding.UTF8);\n writer.Formatting = Formatting.Indented;\n\n var s = new XmlSerializer(typeof(model.GetType())); \n s.Serialize(writer, model);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe strings with value \u003ccode\u003enull\u003c/code\u003e don't appear in the Xml file (which is obviously intended behavior), although I don't want that. 
\nHow can I make \u003ccode\u003enull\u003c/code\u003e strings appear in the Xml file by overriding the \u003ccode\u003eXmlTextWriter\u003c/code\u003e class?\u003c/p\u003e\n\n\u003cp\u003eEDIT: I can't modify the object model that needs to be serialized, so Xml attributes are no option.\u003c/p\u003e","accepted_answer_id":"7580926","answer_count":"3","comment_count":"0","creation_date":"2011-09-28 09:12:34.91 UTC","last_activity_date":"2011-09-28 09:26:46.167 UTC","last_edit_date":"2011-09-28 09:18:24.22 UTC","last_editor_display_name":"","last_editor_user_id":"451540","owner_display_name":"","owner_user_id":"451540","post_type_id":"1","score":"1","tags":"c#|xml|xml-serialization","view_count":"1091"} @@ -2619,7 +2619,7 @@ {"id":"29371522","title":"Set div to width and height of child image?","body":"\u003cp\u003e\u003ca href=\"http://jsfiddle.net/1tbrtoaj/1/\" rel=\"nofollow\"\u003eJSFiddle\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eI have a div with class of container. The height of this div must equal the width. I have achieved this with:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e.container{\n background-color: #e6e6e6;\n position: relative;\n}\n\n.container:before{\n content: \"\";\n display: block;\n padding-top: 100%;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eInside the container is an image holder, and inside this an image. The image must be constrained, it's height or width must not exceed the container and still maintain aspect ratio. This is achieved by:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimg{\n position: absolute;\n top: 0;\n left: 0;\n bottom: 0;\n right: 0;\n\n max-height: 100%; \n max-width: 100%; \n width: auto;\n height: auto;\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMy question concerns the image holder, I need this to be the same width and height as the image that is inside of it. How can I do this? 
Using CSS only please.\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2015-03-31 14:47:22.257 UTC","last_activity_date":"2015-03-31 15:18:07.067 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1013512","post_type_id":"1","score":"0","tags":"css|css3","view_count":"949"} {"id":"43474378","title":"Problems with \"Team Foundation Server is not your current Source Control plug-in\" when I try to start a new project in VS2017","body":"\u003cp\u003eI am new to Visual Studio 2017 and VSTS.\nFrom my VSTS page I choose to create a new application and clone it in Visual Studio.\nWhen I do this it launches Visual Studio. But then I get these error messages. How do I fix this?\n\u003ca href=\"https://i.stack.imgur.com/4XWt1.png\" rel=\"nofollow noreferrer\"\u003e\u003cimg src=\"https://i.stack.imgur.com/4XWt1.png\" alt=\"enter image description here\"\u003e\u003c/a\u003e\u003c/p\u003e","accepted_answer_id":"43486003","answer_count":"2","comment_count":"1","creation_date":"2017-04-18 14:01:04.477 UTC","last_activity_date":"2017-06-12 08:20:40.51 UTC","last_edit_date":"2017-04-18 14:16:49.017 UTC","last_editor_display_name":"","last_editor_user_id":"125673","owner_display_name":"","owner_user_id":"125673","post_type_id":"1","score":"0","tags":"git|visual-studio|vsts","view_count":"652"} {"id":"21687487","title":"Hide data, process to new div with each function","body":"\u003cp\u003eI have multiple same class divs that produce an array and a script that puts them into lists.\u003cbr\u003e\nI would like to hide a JSON array object from briefly flashing (unprocessed) on the page before the script can process it into lists.\u003c/p\u003e\n\n\u003cp\u003eSo I put them into a hidden div and the each function stops working.\u003c/p\u003e\n\n\u003cp\u003eThe .hid divs actually contain:\u003cbr\u003e\n\u003ccode\u003e\u0026lt;%=getCurrentAttribute('item','lists')%\u0026gt;\u003c/code\u003e that produce the JSON 
array.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;div class=\"hid\" style=\"display:none;\"\u0026gt;[{\"k\":\"Model\",\"v\":\"AB\"},{\"k\":\"Color\",\"v\":\"green\"}]\u0026lt;/div\u0026gt;\n\u0026lt;div class=\"overview\"\u0026gt;\u0026lt;/div\u0026gt;\n\u0026lt;div class=\"hid\" style=\"display:none;\"\u0026gt;[{\"k\":\"Model\",\"v\":\"AC\"},{\"k\":\"Color\",\"v\":\"blue\"}]\u0026lt;/div\u0026gt;\n\u0026lt;div class=\"overview\"\u0026gt;\u0026lt;/div\u0026gt;\n\u0026lt;div class=\"hid\" style=\"display:none;\"\u0026gt;[{\"k\":\"Model\",\"v\":\"AD\"},{\"k\":\"Color\",\"v\":\"red\"}]\u0026lt;/div\u0026gt;\n\u0026lt;div class=\"overview\"\u0026gt;\u0026lt;/div\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMy script\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ejQuery('.hid').each(function () {\n var $data = jQuery(this), spec,\n specs = jQuery.parseJSON($data.html());\n jQuery(\".overview\").html('\u0026lt;div class=\"bullet_spec\"\u0026gt;\u0026lt;/div\u0026gt;');\n jQuery.each(specs, function () {\n jQuery(\".overview\").children('div').append('\u0026lt;div class=\"specs\"\u0026gt;\u0026lt;span class=\"label\"\u0026gt;' + this.k + ':\u0026lt;/span\u0026gt;\u0026lt;span class=\"value\"\u0026gt; ' + this.v + '\u0026lt;/span\u0026gt;\u0026lt;/div\u0026gt;');\n });\n { \n }\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003ca href=\"http://jsfiddle.net/Qta2p/\" rel=\"nofollow\"\u003ehttp://jsfiddle.net/Qta2p/\u003c/a\u003e\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2014-02-10 20:43:18.733 UTC","last_activity_date":"2014-02-10 21:46:41.487 UTC","last_edit_date":"2014-02-10 21:46:41.487 UTC","last_editor_display_name":"","last_editor_user_id":"369450","owner_display_name":"","owner_user_id":"2430319","post_type_id":"1","score":"0","tags":"javascript|jquery|arrays|json|each","view_count":"95"} -{"id":"42201046","title":"How to configure the publish address of elasticsearch 5.0 with CLI 
flags?","body":"\u003cp\u003eI've used elasticsearch 2.0 with start up flags to configure the publish_addres. I need the publish address to be configured, because I want to start elasticsearch in a docker container and access it from outside. So the publish address must be the IP of the docker host, which is in my case 192.168.99.100. I want to access elasticsearch on port 9201.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edocker run -d -p 9201:9201 --name elasticsearch_test elasticsearch:5.2-alpine elasticsearch -Enetwork.publish_host=\"192.168.99.100\" -Ehttp.port=\"9201\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhich is like the old command\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edocker run -d -p 9201:9201 --name elasticsearch_test elasticsearch:2.4.1 elasticsearch -Des.network.publish_host=\"192.168.99.100\" -Des.http.port=\"9201\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut when I start the container and look into the logs I don't get the publish address 192.168.99.100:9201, but 192.168.99.100:9300 and 172.17.0.2:9201. How can I force elasticsearch to use my combination of address and port?\u003c/p\u003e\n\n\u003cp\u003eThanks in advance\u003c/p\u003e\n\n\u003cp\u003eOutput of \u003ccode\u003edocker logs elasticsearch_test\u003c/code\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[2017-02-13T09:17:03,095][INFO ][o.e.n.Node ] [] initializing ...\n[2017-02-13T09:17:03,252][INFO ][o.e.e.NodeEnvironment ] [ntIFoHQ] using [1] data paths, mounts [[/usr/share/elasticsearch/data (/dev/sda1)]], net usable_space [1gb], net total_space [17.8gb], spins? 
[possibly], types [ext4]\n[2017-02-13T09:17:03,252][INFO ][o.e.e.NodeEnvironment ] [ntIFoHQ] heap size [1.9gb], compressed ordinary object pointers [true]\n[2017-02-13T09:17:03,253][INFO ][o.e.n.Node ] node name [ntIFoHQ] derived from node ID [ntIFoHQnTAahC7_0cEt32Q]; set [node.name] to override\n[2017-02-13T09:17:03,257][INFO ][o.e.n.Node ] version[5.2.0], pid[1], build[24e05b9/2017-01-24T19:52:35.800Z], OS[Linux/4.4.43-boot2docker/amd64], JVM[Oracle Corporation/OpenJDK 64-Bit Server VM/1.8.0_111-internal/25.111-b14]\n[2017-02-13T09:17:05,249][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [aggs-matrix-stats]\n[2017-02-13T09:17:05,250][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [ingest-common]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-expression]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-groovy]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-mustache]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-painless]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [percolator]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [reindex]\n[2017-02-13T09:17:05,254][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [transport-netty3]\n[2017-02-13T09:17:05,254][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [transport-netty4]\n[2017-02-13T09:17:05,254][INFO ][o.e.p.PluginsService ] [ntIFoHQ] no plugins loaded\n[2017-02-13T09:17:05,677][WARN ][o.e.d.s.g.GroovyScriptEngineService] [groovy] scripts are deprecated, use [painless] scripts instead\n[2017-02-13T09:17:10,757][INFO ][o.e.n.Node ] initialized\n[2017-02-13T09:17:10,757][INFO ][o.e.n.Node ] [ntIFoHQ] starting ...\n[2017-02-13T09:17:11,015][WARN ][i.n.u.i.MacAddressUtil ] Failed to find a usable hardware address from the network interfaces; using random bytes: 
07:0a:ef:37:62:95:b2:77\n[2017-02-13T09:17:11,198][INFO ][o.e.t.TransportService ] [ntIFoHQ] publish_address {192.168.99.100:9300}, bound_addresses {[::1]:9300}, {127.0.0.1:9300}\n[2017-02-13T09:17:11,203][INFO ][o.e.b.BootstrapChecks ] [ntIFoHQ] bound or publishing to a non-loopback or non-link-local address, enforcing bootstrap checks\n[2017-02-13T09:17:14,351][INFO ][o.e.c.s.ClusterService ] [ntIFoHQ] new_master {ntIFoHQ}{ntIFoHQnTAahC7_0cEt32Q}{cW1MZt0-RmutLXz_Tkm8mw}{192.168.99.100}{192.168.99.100:9300}, reason: zen-disco-elected-as-master ([0] nodes joined)\n[2017-02-13T09:17:14,395][INFO ][o.e.h.HttpServer ] [ntIFoHQ] publish_address {172.17.0.2:9201}, bound_addresses {[::]:9201}\n[2017-02-13T09:17:14,396][INFO ][o.e.n.Node ] [ntIFoHQ] started\n[2017-02-13T09:17:14,423][INFO ][o.e.g.GatewayService ] [ntIFoHQ] recovered [0] indices into cluster_state\n[2017-02-13T09:17:44,398][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:17:44,398][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:18:14,434][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:18:44,438][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:18:44,438][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:19:14,443][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk 
watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:19:44,446][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:19:44,447][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:20:14,453][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:20:44,459][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:20:44,459][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:21:14,467][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:21:44,471][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:21:44,471][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:22:14,482][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on 
[ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:22:44,485][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:22:44,485][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:23:14,497][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/elasticsearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-02-13 09:48:55.953 UTC","last_activity_date":"2017-09-19 11:57:56.39 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2000569","post_type_id":"1","score":"2","tags":"elasticsearch|docker|elasticsearch-5","view_count":"211"} +{"id":"42201046","title":"How to configure the publish address of opensearch 5.0 with CLI flags?","body":"\u003cp\u003eI've used opensearch 2.0 with start up flags to configure the publish_addres. I need the publish address to be configured, because I want to start opensearch in a docker container and access it from outside. So the publish address must be the IP of the docker host, which is in my case 192.168.99.100. 
I want to access opensearch on port 9201.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edocker run -d -p 9201:9201 --name opensearch_test opensearch:5.2-alpine opensearch -Enetwork.publish_host=\"192.168.99.100\" -Ehttp.port=\"9201\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhich is like the old command\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edocker run -d -p 9201:9201 --name opensearch_test opensearch:2.4.1 opensearch -Des.network.publish_host=\"192.168.99.100\" -Des.http.port=\"9201\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut when I start the container and look into the logs I don't get the publish address 192.168.99.100:9201, but 192.168.99.100:9300 and 172.17.0.2:9201. How can I force opensearch to use my combination of address and port?\u003c/p\u003e\n\n\u003cp\u003eThanks in advance\u003c/p\u003e\n\n\u003cp\u003eOutput of \u003ccode\u003edocker logs opensearch_test\u003c/code\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[2017-02-13T09:17:03,095][INFO ][o.e.n.Node ] [] initializing ...\n[2017-02-13T09:17:03,252][INFO ][o.e.e.NodeEnvironment ] [ntIFoHQ] using [1] data paths, mounts [[/usr/share/opensearch/data (/dev/sda1)]], net usable_space [1gb], net total_space [17.8gb], spins? 
[possibly], types [ext4]\n[2017-02-13T09:17:03,252][INFO ][o.e.e.NodeEnvironment ] [ntIFoHQ] heap size [1.9gb], compressed ordinary object pointers [true]\n[2017-02-13T09:17:03,253][INFO ][o.e.n.Node ] node name [ntIFoHQ] derived from node ID [ntIFoHQnTAahC7_0cEt32Q]; set [node.name] to override\n[2017-02-13T09:17:03,257][INFO ][o.e.n.Node ] version[5.2.0], pid[1], build[24e05b9/2017-01-24T19:52:35.800Z], OS[Linux/4.4.43-boot2docker/amd64], JVM[Oracle Corporation/OpenJDK 64-Bit Server VM/1.8.0_111-internal/25.111-b14]\n[2017-02-13T09:17:05,249][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [aggs-matrix-stats]\n[2017-02-13T09:17:05,250][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [ingest-common]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-expression]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-groovy]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-mustache]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [lang-painless]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [percolator]\n[2017-02-13T09:17:05,251][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [reindex]\n[2017-02-13T09:17:05,254][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [transport-netty3]\n[2017-02-13T09:17:05,254][INFO ][o.e.p.PluginsService ] [ntIFoHQ] loaded module [transport-netty4]\n[2017-02-13T09:17:05,254][INFO ][o.e.p.PluginsService ] [ntIFoHQ] no plugins loaded\n[2017-02-13T09:17:05,677][WARN ][o.e.d.s.g.GroovyScriptEngineService] [groovy] scripts are deprecated, use [painless] scripts instead\n[2017-02-13T09:17:10,757][INFO ][o.e.n.Node ] initialized\n[2017-02-13T09:17:10,757][INFO ][o.e.n.Node ] [ntIFoHQ] starting ...\n[2017-02-13T09:17:11,015][WARN ][i.n.u.i.MacAddressUtil ] Failed to find a usable hardware address from the network interfaces; using random bytes: 
07:0a:ef:37:62:95:b2:77\n[2017-02-13T09:17:11,198][INFO ][o.e.t.TransportService ] [ntIFoHQ] publish_address {192.168.99.100:9300}, bound_addresses {[::1]:9300}, {127.0.0.1:9300}\n[2017-02-13T09:17:11,203][INFO ][o.e.b.BootstrapChecks ] [ntIFoHQ] bound or publishing to a non-loopback or non-link-local address, enforcing bootstrap checks\n[2017-02-13T09:17:14,351][INFO ][o.e.c.s.ClusterService ] [ntIFoHQ] new_master {ntIFoHQ}{ntIFoHQnTAahC7_0cEt32Q}{cW1MZt0-RmutLXz_Tkm8mw}{192.168.99.100}{192.168.99.100:9300}, reason: zen-disco-elected-as-master ([0] nodes joined)\n[2017-02-13T09:17:14,395][INFO ][o.e.h.HttpServer ] [ntIFoHQ] publish_address {172.17.0.2:9201}, bound_addresses {[::]:9201}\n[2017-02-13T09:17:14,396][INFO ][o.e.n.Node ] [ntIFoHQ] started\n[2017-02-13T09:17:14,423][INFO ][o.e.g.GatewayService ] [ntIFoHQ] recovered [0] indices into cluster_state\n[2017-02-13T09:17:44,398][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:17:44,398][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:18:14,434][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:18:44,438][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:18:44,438][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:19:14,443][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark 
[90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:19:44,446][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:19:44,447][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:20:14,453][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:20:44,459][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:20:44,459][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:21:14,467][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:21:44,471][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:21:44,471][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:22:14,482][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on 
[ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:22:44,485][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n[2017-02-13T09:22:44,485][INFO ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] rerouting shards: [high disk watermark exceeded on one or more nodes]\n[2017-02-13T09:23:14,497][WARN ][o.e.c.r.a.DiskThresholdMonitor] [ntIFoHQ] high disk watermark [90%] exceeded on [ntIFoHQnTAahC7_0cEt32Q][ntIFoHQ][/usr/share/opensearch/data/nodes/0] free: 1gb[5.7%], shards will be relocated away from this node\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-02-13 09:48:55.953 UTC","last_activity_date":"2017-09-19 11:57:56.39 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2000569","post_type_id":"1","score":"2","tags":"opensearch|docker|opensearch-5","view_count":"211"} {"id":"14044046","title":"Echo \"part\" of array (decoded from JSON)","body":"\u003cp\u003eI'am quite new to JSON and more \"advanced\" arrays. 
Therefore I don't know what I should search for...\u003c/p\u003e\n\n\u003cp\u003eI have this \"JSON array\" (what do you call it?):\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"id\": \"321123321\",\n \"statuses\": {\n \"data\": [\n {\n \"message\": \"testmessage\",\n \"updated_time\": \"2012-12-25T16:33:29+0000\",\n \"id\": \"123321123\"\n }\n ],\n \"paging\": {\n \"previous\": \"1\",\n \"next\": \"1\"\n }\n }\n}​\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI want to create a variable from \"message\" that is called $message and a variable from \"up_datedtime\" that is called $updated.\u003c/p\u003e\n\n\u003cp\u003eTo get id I simple:\n$json_a=json_decode($string,true);\n$id $json_a['id'];\u003c/p\u003e\n\n\u003cp\u003eAnd for statuses: \n$json_a=json_decode($string,true);\n$status = $json_a['id']['statuses'];\u003c/p\u003e\n\n\u003cp\u003eBut when I try to get \"message\" I get \" Cannot use string offset as an array in\":\n $message = $json_a['id']['statuses']['data']['message'];\u003c/p\u003e\n\n\u003cp\u003eHow do I get $message from the array the proper way?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2012-12-26 17:38:14.877 UTC","last_activity_date":"2012-12-26 17:47:45.887 UTC","last_edit_date":"2012-12-26 17:42:42.623 UTC","last_editor_display_name":"","last_editor_user_id":"1353011","owner_display_name":"","owner_user_id":"1930152","post_type_id":"1","score":"0","tags":"arrays|echo","view_count":"82"} {"id":"32348574","title":"How to calculate the sum of variables in PHP","body":"\u003cp\u003eIt calculates, but starting from the second row.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;?php\ninclude('connect-db.php');\n$query = \"select * from users\";\n$result = mysql_query($query); \n$row = mysql_fetch_array($result);\n$sold= array();\n\nwhile ($row = mysql_fetch_array($result, MYSQL_ASSOC)) { \n $sold=$row['contract']+$row['tva'];\n echo 
\"\u0026lt;table\u0026gt;\u0026lt;tr\u0026gt;\u0026lt;td\u0026gt;\" . $sold. \"\u0026lt;/td\u0026gt;\u0026lt;/tr\u0026gt;\u0026lt;/table\u0026gt;\";\n}\n?\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"32348651","answer_count":"3","comment_count":"1","creation_date":"2015-09-02 08:47:52.72 UTC","last_activity_date":"2015-09-02 09:06:09.85 UTC","last_edit_date":"2015-09-02 08:55:02.337 UTC","last_editor_display_name":"","last_editor_user_id":"2613662","owner_display_name":"","owner_user_id":"5291684","post_type_id":"1","score":"0","tags":"php|mysql","view_count":"110"} {"id":"46235057","title":"pandas DF column max difference between pair of values recursively","body":"\u003cp\u003eI have a DataFrame with a column 'col1' with integers in it.\nThe DF may have anything from 100 up to 1mln rows.\nHow to compute difference between pair of values in the col1 such as:\u003c/p\u003e\n\n\u003cp\u003erow2 - row1\nrow3 - row2\nrow4 - row3\netc\u003c/p\u003e\n\n\u003cp\u003eand return max difference? 
\u003c/p\u003e\n\n\u003cp\u003eI know how to use loc, iloc but do not know how to force it to go through pair of values and move to next pair\u003c/p\u003e","answer_count":"1","comment_count":"4","creation_date":"2017-09-15 08:28:10.18 UTC","last_activity_date":"2017-09-15 13:02:47.253 UTC","last_edit_date":"2017-09-15 09:23:21.173 UTC","last_editor_display_name":"","last_editor_user_id":"8144295","owner_display_name":"","owner_user_id":"5892612","post_type_id":"1","score":"0","tags":"python|pandas|dataframe|difference","view_count":"111"} @@ -2659,7 +2659,7 @@ {"id":"44181462","title":"Can I use ReportPortal with nightwatchjs?","body":"\u003cp\u003eCan I use ReportPortal with nightwatchjs + mocha?\nIf someone has experience, then tell in details please!\nP.s.: Now for reporting I use mochawesome-report-generator.\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2017-05-25 13:10:51.99 UTC","last_activity_date":"2017-06-14 16:12:47.003 UTC","last_edit_date":"2017-05-25 14:11:13.973 UTC","last_editor_display_name":"","last_editor_user_id":"7787841","owner_display_name":"","owner_user_id":"7787841","post_type_id":"1","score":"0","tags":"reportportal","view_count":"30"} {"id":"37614239","title":"How to update angular 1.2 service to 1.5 in angular 2 style","body":"\u003cp\u003e\u003ca href=\"https://github.com/snapjay/ngCart/blob/master/src/ngCart.js#L30\" rel=\"nofollow\"\u003ehttps://github.com/snapjay/ngCart/blob/master/src/ngCart.js#L30\u003c/a\u003e\nI need to update this repo from 1.2 angular to 1.5 and in 2.0 in future\nI am start to upgrade this example from addToCart component\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport * as angular from 'angular';\nimport angularMeteor from 'angular-meteor';\n\nimport { name as ngCart } from '../../../api/ngCart/ngCart';\n\n\nimport './addToCart.html';\nclass AddToCart {\n constructor($scope, $reactive) {//, ngCart\n //ngCart object here should service return function?\n 
//angular_angular.js?hash=08f63d2…:13439 TypeError: _apiNgCartNgCart.name.getItemById is not a function\n 'ngInject';\n $reactive(this).attach($scope);\n\n if (this.inCart()) {\n this.q = ngCart.getItemById(this.id).getQuantity();\n } else {\n this.q = parseInt(this.quantity);\n }\n\n this.qtyOpt = [];\n for (var i = 1; i \u0026lt;= this.quantityMax; i++) {\n this.qtyOpt.push(i);\n }\n }\n\n inCart() {\n console.log(\"cart \" + ngCart);\n return ngCart.getItemById(this.id);\n }\n\n}\n\nconst name = 'addToCart';\n\n// create a module\nexport default angular.module(name, [\n angularMeteor,\n ngCart\n]).component(name, {\n templateUrl: `imports/ui/components/${name}/${name}.html`,\n controllerAs: name,\n bindings: {\n id: '@',\n name: '@',\n quantity: '@',\n quantityMax: '@',\n price: '@',\n data: '='\n },\n controller: AddToCart\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand it gives me following error \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTypeError: _apiNgCartNgCart.name.getItemById is not a function\n at AddToCart.inCart (addToCart.js:39)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand here ngCart service\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport { name as ngCartItem } from './ngCartItem';\nimport { name as store } from './store';\n\nclass NgCart {\n constructor($scope, $reactive, $window) {\n 'ngInject';\n $reactive(this).attach($scope);\n }\n\n $onInit() {\n // $rootScope.$on('ngCart:change', function(){ // i shouldn't user rooutscope here\n // ngCart.$save();\n // });\n if (angular.isObject(store.get('cart'))) {\n this.$restore(store.get('cart'));\n } else {\n this.init();\n }\n }\n\n init() {\n this.$cart = {\n shipping: null,\n taxRate: null,\n tax: null,\n items: []\n };\n };\n\n\n\n addItem(id, name, price, quantity, data) {\n\n var inCart = this.getItemById(id);\n\n if (typeof inCart === 'object') {\n //Update quantity of an item if it's already in the cart\n inCart.setQuantity(quantity, false);\n // 
$rootScope.$broadcast('ngCart:itemUpdated', inCart);\n } else {\n var newItem = new ngCartItem(id, name, price, quantity, data);\n this.$cart.items.push(newItem);\n // $rootScope.$broadcast('ngCart:itemAdded', newItem);\n }\n\n // $rootScope.$broadcast('ngCart:change', {});\n };\n\n getItemById(itemId) {\n var items = this.getCart().items;\n var build = false;\n\n angular.forEach(items, function (item) {\n if (item.getId() === itemId) {\n build = item;\n }\n });\n return build;\n };\n\n setShipping(shipping) {\n this.$cart.shipping = shipping;\n return this.getShipping();\n };\n\n getShipping() {\n if (this.getCart().items.length == 0) return 0;\n return this.getCart().shipping;\n };\n\n setTaxRate(taxRate) {\n this.$cart.taxRate = +parseFloat(taxRate).toFixed(2);\n return this.getTaxRate();\n };\n\n getTaxRate() {\n return this.$cart.taxRate\n };\n\n getTax() {\n return +parseFloat(((this.getSubTotal() / 100) * this.getCart().taxRate)).toFixed(2);\n };\n\n setCart(cart) {\n this.$cart = cart;\n return this.getCart();\n };\n\n getCart() {\n return this.$cart;\n };\n\n getItems() {\n return this.getCart().items;\n };\n\n getTotalItems() {\n var count = 0;\n var items = this.getItems();\n angular.forEach(items, function (item) {\n count += item.getQuantity();\n });\n return count;\n };\n\n getTotalUniqueItems() {\n return this.getCart().items.length;\n };\n\n getSubTotal() {\n var total = 0;\n angular.forEach(this.getCart().items, function (item) {\n total += item.getTotal();\n });\n return +parseFloat(total).toFixed(2);\n };\n\n totalCost() {\n return +parseFloat(this.getSubTotal() + this.getShipping() + this.getTax()).toFixed(2);\n };\n\n removeItem(index) {\n var item = this.$cart.items.splice(index, 1)[0] || {};\n // $rootScope.$broadcast('ngCart:itemRemoved', item);\n // $rootScope.$broadcast('ngCart:change', {});\n\n };\n\n removeItemById(id) {\n var item;\n var cart = this.getCart();\n angular.forEach(cart.items, function (item, index) {\n if (item.getId() === 
id) {\n item = cart.items.splice(index, 1)[0] || {};\n }\n });\n this.setCart(cart);\n // $rootScope.$broadcast('ngCart:itemRemoved', item);\n // $rootScope.$broadcast('ngCart:change', {});\n };\n\n empty() {\n\n // $rootScope.$broadcast('ngCart:change', {});\n this.$cart.items = [];\n $window.localStorage.removeItem('cart');\n };\n\n isEmpty() {\n\n return (this.$cart.items.length \u0026gt; 0 ? false : true);\n\n };\n\n toObject() {\n\n if (this.getItems().length === 0) return false;\n\n var items = [];\n angular.forEach(this.getItems(), function (item) {\n items.push(item.toObject());\n });\n\n return {\n shipping: this.getShipping(),\n tax: this.getTax(),\n taxRate: this.getTaxRate(),\n subTotal: this.getSubTotal(),\n totalCost: this.totalCost(),\n items: items\n }\n };\n\n\n $restore(storedCart) {\n var _self = this;\n _self.init();\n _self.$cart.shipping = storedCart.shipping;\n _self.$cart.tax = storedCart.tax;\n\n angular.forEach(storedCart.items, function (item) {\n _self.$cart.items.push(new ngCartItem(item._id, item._name, item._price, item._quantity, item._data));\n });\n this.$save();\n };\n\n $save() {\n return store.set('cart', JSON.stringify(this.getCart()));\n }\n\n\n\n}\n\nconst name = 'ngCart';\n\n// create a module\nexport default angular.module(name, [\n angularMeteor,\n ngCartItem,\n store\n]).service(name, {\n controllerAs: name,\n controller: NgCart\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow to import service in 1.5?\nI am using angular-meteor and followed \u003ca href=\"http://www.angular-meteor.com/tutorials/socially/angular1/bootstrapping\" rel=\"nofollow\"\u003ethis\u003c/a\u003e tutorial\u003c/p\u003e\n\n\u003cp\u003eAnd also there can't be scope in service \n// This controller throws an unknown provider error because\n// a scope object cannot be injected into a service.\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003e\u003ca href=\"https://docs.angularjs.org/error/\" 
rel=\"nofollow\"\u003ehttps://docs.angularjs.org/error/\u003c/a\u003e$injector/unpr?p0=$scopeProvider%20%3C-%20$scope%20%3C-%20ngCart\u003c/p\u003e\n\u003c/blockquote\u003e","accepted_answer_id":"37875647","answer_count":"2","comment_count":"0","creation_date":"2016-06-03 12:19:01.233 UTC","last_activity_date":"2016-06-17 07:31:18.693 UTC","last_edit_date":"2016-06-03 13:21:54.92 UTC","last_editor_display_name":"","last_editor_user_id":"880709","owner_display_name":"","owner_user_id":"880709","post_type_id":"1","score":"4","tags":"angularjs|angular-meteor","view_count":"170"} {"id":"21088407","title":"Perfectly linear spacing between gridview items","body":"\u003cp\u003eI am using following code to show the GirdView items.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;GridView\n android:id=\"@+id/gridView\"\n android:gravity=\"center_horizontal\"\n android:layout_below=\"@+id/searchLayout\"\n android:layout_width=\"match_parent\"\n android:layout_height=\"match_parent\"\n android:horizontalSpacing=\"10dp\"\n android:numColumns=\"3\"\n android:stretchMode=\"columnWidth\"\n android:verticalSpacing=\"10dp\" /\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eEach GridItem (ImageView) is of size \u003ccode\u003e92dp\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003eWhat i want is to show only 3 Columns or 3 images each Row and each Top, bottom ,left right all needs to be perfectly aligned and equal.\u003c/p\u003e\n\n\u003cp\u003eBelow is the result of above code.\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/HTKxv.png\" alt=\"enter image description here\"\u003e\u003c/p\u003e\n\n\u003cp\u003eIt can be seen that spaces on left and right of the grid are very less as compared with the ones in between images and also between rows are very small.\u003c/p\u003e\n\n\u003cp\u003eSecondly, I am using 92dp. above is the result of S3, but when i use small screen the 3rd image doesn't get fit like in 320 dp screen. 
\u003c/p\u003e\n\n\u003cp\u003eShouldn't using \"dp\" automatically adjust according to screen size?\u003c/p\u003e","answer_count":"6","comment_count":"1","creation_date":"2014-01-13 10:01:38.767 UTC","last_activity_date":"2015-02-10 11:45:16.64 UTC","last_edit_date":"2014-02-07 12:55:13.75 UTC","last_editor_display_name":"","last_editor_user_id":"1528942","owner_display_name":"","owner_user_id":"1921872","post_type_id":"1","score":"3","tags":"java|android|android-layout","view_count":"1965"} -{"id":"10846066","title":"OpenGL ES snapshot for multi-sampling giving odd colors in iOS","body":"\u003cp\u003eWhen trying to get opengl view snapshot as UIImage for multi-sampling the image colors are different.\u003c/p\u003e\n\n\u003cp\u003eWhen multi-sampling is off, it is proper.\nThis is how I am taking the snapshot:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e- (UIImage*)snapshot\n{\n\n GLint backingWidth, backingHeight;\n\n backingWidth = framebufferWidth;\n backingHeight = framebufferHeight;\n\n NSInteger myDataLength = backingWidth * backingHeight * 4;\n\n GLubyte *buffer = (GLubyte *) malloc(myDataLength);\n glReadPixels(0, 0, backingWidth, backingHeight, GL_RGBA, GL_UNSIGNED_BYTE, buffer);\n\n // gl renders \"upside down\" so swap top to bottom into new array.\n GLubyte *buffer2 = (GLubyte *) malloc(myDataLength);\n\n for(int y = 0; y \u0026lt; backingHeight; y++) {\n for(int x = 0; x \u0026lt; backingWidth * 4; x++) {\n buffer2[y*4*backingWidth + x] = buffer[(backingHeight - y -1 ) * backingWidth * 4 + x];\n }\n }\n // make data provider with data.\n CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, myProviderReleaseData);\n\n // prep the ingredients\n int bitsPerComponent = 8;\n int bitsPerPixel = 32;\n int bytesPerRow = 4 * backingWidth;\n CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();\n CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast ;\n CGColorRenderingIntent 
renderingIntent = kCGRenderingIntentDefault;\n\n // make the cgimage\n CGImageRef imageRef = CGImageCreate(backingWidth, backingHeight, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);\n\n // then make the uiimage from that\n UIImage *image1 = [UIImage imageWithCGImage:imageRef];\n\n CGImageRelease(imageRef);\n CGColorSpaceRelease(colorSpaceRef);\n CGDataProviderRelease(provider);\n free(buffer);\n\n return image1;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere are the result of taking the snapshot :\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/VCJpr.png\" alt=\"opengl view\"\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/TYEfJ.png\" alt=\"snapshot of the same view\"\u003e\u003c/p\u003e\n\n\u003cp\u003eFirst one is the opengl view I am drawing and second image is the snapshot of the image I am getting for the above mentioned code. \u003c/p\u003e\n\n\u003cp\u003eI am not using GLKit framework. 
want to know why multisampling is messing up the snapshot.\u003c/p\u003e","accepted_answer_id":"10846373","answer_count":"1","comment_count":"0","creation_date":"2012-06-01 07:20:05.307 UTC","last_activity_date":"2012-06-01 14:48:24.51 UTC","last_edit_date":"2012-06-01 14:48:24.51 UTC","last_editor_display_name":"","last_editor_user_id":"44729","owner_display_name":"","owner_user_id":"515915","post_type_id":"1","score":"0","tags":"ios|opengl-es|snapshot","view_count":"413"} +{"id":"10846066","title":"OpenGL opensearch snapshot for multi-sampling giving odd colors in iOS","body":"\u003cp\u003eWhen trying to get opengl view snapshot as UIImage for multi-sampling the image colors are different.\u003c/p\u003e\n\n\u003cp\u003eWhen multi-sampling is off, it is proper.\nThis is how I am taking the snapshot:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e- (UIImage*)snapshot\n{\n\n GLint backingWidth, backingHeight;\n\n backingWidth = framebufferWidth;\n backingHeight = framebufferHeight;\n\n NSInteger myDataLength = backingWidth * backingHeight * 4;\n\n GLubyte *buffer = (GLubyte *) malloc(myDataLength);\n glReadPixels(0, 0, backingWidth, backingHeight, GL_RGBA, GL_UNSIGNED_BYTE, buffer);\n\n // gl renders \"upside down\" so swap top to bottom into new array.\n GLubyte *buffer2 = (GLubyte *) malloc(myDataLength);\n\n for(int y = 0; y \u0026lt; backingHeight; y++) {\n for(int x = 0; x \u0026lt; backingWidth * 4; x++) {\n buffer2[y*4*backingWidth + x] = buffer[(backingHeight - y -1 ) * backingWidth * 4 + x];\n }\n }\n // make data provider with data.\n CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, myProviderReleaseData);\n\n // prep the ingredients\n int bitsPerComponent = 8;\n int bitsPerPixel = 32;\n int bytesPerRow = 4 * backingWidth;\n CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();\n CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast ;\n CGColorRenderingIntent renderingIntent = 
kCGRenderingIntentDefault;\n\n // make the cgimage\n CGImageRef imageRef = CGImageCreate(backingWidth, backingHeight, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);\n\n // then make the uiimage from that\n UIImage *image1 = [UIImage imageWithCGImage:imageRef];\n\n CGImageRelease(imageRef);\n CGColorSpaceRelease(colorSpaceRef);\n CGDataProviderRelease(provider);\n free(buffer);\n\n return image1;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere are the result of taking the snapshot :\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/VCJpr.png\" alt=\"opengl view\"\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/TYEfJ.png\" alt=\"snapshot of the same view\"\u003e\u003c/p\u003e\n\n\u003cp\u003eFirst one is the opengl view I am drawing and second image is the snapshot of the image I am getting for the above mentioned code. \u003c/p\u003e\n\n\u003cp\u003eI am not using GLKit framework. 
want to know why multisampling is messing up the snapshot.\u003c/p\u003e","accepted_answer_id":"10846373","answer_count":"1","comment_count":"0","creation_date":"2012-06-01 07:20:05.307 UTC","last_activity_date":"2012-06-01 14:48:24.51 UTC","last_edit_date":"2012-06-01 14:48:24.51 UTC","last_editor_display_name":"","last_editor_user_id":"44729","owner_display_name":"","owner_user_id":"515915","post_type_id":"1","score":"0","tags":"ios|opengl-opensearch|snapshot","view_count":"413"} {"id":"47534469","title":"geting server IP that post data","body":"\u003cp\u003eI am POSTing some data from one server to another using following code:\u003c/p\u003e\n\n\u003cblockquote\u003e\n\u003cpre\u003e\u003ccode\u003e $data2 = http_build_query(\n array(\n 'desc' =\u0026gt; $desc\n ) );\n $options = array('http' =\u0026gt;\n array(\n 'method' =\u0026gt; 'POST',\n 'header' =\u0026gt; 'Content-type: application/x-www-form-urlencoded',\n 'content' =\u0026gt; $data2\n ) );\n $context = stream_context_create($options); // post request $result = file_get_contents($payment_url, false, $context);\n $result = @json_decode($result, true);\n\u003c/code\u003e\u003c/pre\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eI would like to ensure that following POST comming from correct server so I have to check server IP posting this data. but $_SERVER['SERVER_ADDR'] give me wrong IP (actually give me destination`s server IP instead Posting server IP).\u003c/p\u003e","answer_count":"1","comment_count":"4","creation_date":"2017-11-28 14:52:06.933 UTC","last_activity_date":"2017-11-28 15:10:35.45 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1081526","post_type_id":"1","score":"0","tags":"php|http-post","view_count":"23"} {"id":"24788335","title":"Maven alternate settings.xml when don't have option of mvn -s","body":"\u003cp\u003eWe're working with an integration partner who's given us a set of bash scripts, which then call Maven projects/targets. 
I'm trying to let our (shared) Jenkins server build those projects. In Maven2, one could provide a org.apache.maven.user-settings MAVEN_OPT setting. Maven3 no longer supports that \u003ca href=\"https://jira.codehaus.org/browse/MNG-5026\" rel=\"nofollow\"\u003eoption\u003c/a\u003e. \u003c/p\u003e\n\n\u003cp\u003e\u003cem\u003eThings I've tried:\u003c/em\u003e\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003ethe afore-mentioned org.apache.maven.user-settings\u003c/li\u003e\n\u003cli\u003ealias mvn='mvn -s /path/to/project-settings.xml'\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003e\u003cem\u003eThings I've considered, but haven't yet tried:\u003c/em\u003e\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eWriting a wrapping \u003ca href=\"http://mojo.codehaus.org/exec-maven-plugin/usage.html\" rel=\"nofollow\"\u003emvn exec\u003c/a\u003e, so I can execute my job from within Jenkins and provide an alternate settings file via its means\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eI've seen threads where others have wrestled with this, but haven't yet seen a proposed solution. \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2014-07-16 18:43:58.537 UTC","last_activity_date":"2014-07-16 19:20:37.44 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"872848","post_type_id":"1","score":"2","tags":"bash|maven","view_count":"253"} {"id":"20418189","title":"Is there a way to tell C to never allocate memory dynamically?","body":"\u003cp\u003eI want to write a C program whose memory will be constant. It can never allocate more memory than certain amount. \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eint main(){\n int memory[512];\n int i = 5;\n i = i + 5;\n memory[50] = i;\n};\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNotice that on this example \u003ccode\u003ei = 5\u003c/code\u003e and \u003ccode\u003ei = i+5\u003c/code\u003e will allocate memory. 
I want to completely avoid the internal memory allocation procedure (which I believe is kind of slow).´Is there a way to tell C to allocate it directly on my memory array?\u003c/p\u003e","answer_count":"5","comment_count":"11","creation_date":"2013-12-06 07:13:48.517 UTC","last_activity_date":"2013-12-06 08:08:33.347 UTC","last_edit_date":"2013-12-06 07:41:22.953 UTC","last_editor_display_name":"","last_editor_user_id":"1031791","owner_display_name":"","owner_user_id":"1031791","post_type_id":"1","score":"-4","tags":"c|memory-management","view_count":"172"} @@ -2796,7 +2796,7 @@ {"id":"33138059","title":"Using API Apps with Swagger in dev/test/production environments","body":"\u003cp\u003eI'm migrating a combined Azure Website (with both Controllers and ApiControllers) to a split Web App and API App. Let's call it MyApp.\u003c/p\u003e\n\n\u003cp\u003eI've created MyAppDevApi, MyAppTestApi, and MyAppProductionApi API Apps (in different App Services) to host the three environments, expecting to promote code from one environment to another.\u003c/p\u003e\n\n\u003cp\u003eSo far, I've only deployed to MyAppDevApi, since I'm just getting started.\u003c/p\u003e\n\n\u003cp\u003eWhen I do \u003ccode\u003eAdd/Azure API App Client\u003c/code\u003e to my UI-only project to start referring to the API app, and I point it to MyAppDevApi, it uses AutoRest to create classes in my code. These classes now all have the name MyAppDevApi, rather than just MyAppApi, which is the actual namespace of the code I'm deploying to every environment. Obviously, I can't check that in... 
how can I promote that through Test and Prod?\u003c/p\u003e\n\n\u003cp\u003eThere's nothing in the Swagger JSON that refers to this name, so it must be on the AutoRest side (I think).\u003c/p\u003e\n\n\u003cp\u003eHas anyone come up with a strategy or work-around to deal with this multiple-environment promotion issue with API Apps?\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEdit\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eSo far the best thing I've come up with is to download the Swagger from the API App to a local file (which, again, has only the namespace from the original code, not from the name of the API App), and then import it into the Web App. This will generate classes in the Web App that have the naming I expect.\u003c/p\u003e\n\n\u003cp\u003eThe problem is that I then have to edit the generated MyAppApi.cs file's _baseUri property to pull from an AppSetting, have the different web.config.dev, .test, .prod, and then do the web.config transform. I mean, that'll work, but then every time I change the API App's interface, I'll regenerate... and then I'll have remember to change the _baseUri again... and someone, sometime is going to forget to do this and then deploy to production. It's really, really fragile.\u003c/p\u003e\n\n\u003cp\u003eSo... 
anyone have a better idea?\u003c/p\u003e","accepted_answer_id":"33223273","answer_count":"2","comment_count":"0","creation_date":"2015-10-15 00:53:11.64 UTC","last_activity_date":"2015-10-19 20:24:45.127 UTC","last_edit_date":"2015-10-15 19:22:36.373 UTC","last_editor_display_name":"","last_editor_user_id":"1179286","owner_display_name":"","owner_user_id":"1179286","post_type_id":"1","score":"1","tags":"azure|azure-web-sites|azure-api-apps","view_count":"375"} {"id":"869969","title":"When using Excel's \"Print Titles\" how do i change the titles midway down the sheet","body":"\u003cp\u003eI have a classic ASP web app that outputs reports to Excel, but it's really just html.\u003c/p\u003e\n\n\u003cp\u003eSome reports output with multiple groups and each group can span multiple pages (vertically). I'm aware of the \"Page Titles\" ability of Excel to print a specified row (or rows) on every page, however, I need the title of each group to also display in the title. Otherwise the title of the first group gets displayed as the title of every group.\u003c/p\u003e\n\n\u003cp\u003eI saw on google groups that someone suggested putting each group on a separate worksheet however I don't think I can output multiple worksheets easily - or at all - using html alone.\u003c/p\u003e\n\n\u003cp\u003eI'm looking for a quick and dirty solution as I don't have much time to devote to maintaining this crufty old app.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2009-05-15 17:38:30.203 UTC","last_activity_date":"2010-11-07 13:54:18.82 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"16162","post_type_id":"1","score":"0","tags":"asp-classic|printing|export-to-excel","view_count":"1192"} {"id":"46400679","title":"How to wrap it up?","body":"\u003cp\u003eI have following code snippet from the haskellbook that shows step by step, how monad transformer is going to unwrap: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003emodule OuterInner 
where\n\n import Control.Monad.Trans.Except\n import Control.Monad.Trans.Maybe\n import Control.Monad.Trans.Reader\n\n -- We only need to use return once\n -- because it's one big Monad\n embedded :: MaybeT (ExceptT String (ReaderT () IO)) Int\n embedded = return 1\n\n maybeUnwrap :: ExceptT String (ReaderT () IO) (Maybe Int)\n maybeUnwrap = runMaybeT embedded\n\n -- Next\n eitherUnwrap :: ReaderT () IO (Either String (Maybe Int))\n eitherUnwrap = runExceptT maybeUnwrap\n\n -- Lastly\n readerUnwrap :: () -\u0026gt; IO (Either String (Maybe Int))\n readerUnwrap = runReaderT eitherUnwrap\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThere is an exercise, that I have to wrap everything again: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eembedded :: MaybeT (ExceptT String (ReaderT () IO)) Int\nembedded = ??? (const (Right (Just 1))) \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI tried as follows: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eembedded' :: MaybeT (ExceptT String (ReaderT () IO)) Int\nembedded' = MaybeT (ExceptT (ReaderT (const (Right (Just 1)))))\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut the compiler complains:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eD:\\haskell\\chapter26\\src\\OuterInner.hs:24:15: error:\n * Couldn't match type `Either a0' with `IO'\n Expected type: MaybeT (ExceptT String (ReaderT () IO)) Int\n Actual type: MaybeT (ExceptT String (ReaderT () (Either a0))) Int\n * In the expression:\n MaybeT (ExceptT (ReaderT (const (Right (Just 1)))))\n In an equation for embedded':\n embedded' = MaybeT (ExceptT (ReaderT (const (Right (Just 1)))))\n\nD:\\haskell\\chapter26\\src\\OuterInner.hs:24:32: error:\n * Couldn't match type `Maybe Integer'\n with `Either String (Maybe Int)'\n Expected type: ReaderT () (Either a0) (Either String (Maybe Int))\n Actual type: ReaderT () (Either a0) (Maybe Integer)\n * In the first argument of `ExceptT', namely\n `(ReaderT (const (Right (Just 1))))'\n In the first argument of 
`MaybeT', namely\n `(ExceptT (ReaderT (const (Right (Just 1)))))'\n In the expression:\n MaybeT (ExceptT (ReaderT (const (Right (Just 1)))))\nFailed, modules loaded: none.\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow to solve the exercise?\u003c/p\u003e","accepted_answer_id":"46403024","answer_count":"2","comment_count":"0","creation_date":"2017-09-25 08:26:57.547 UTC","last_activity_date":"2017-09-25 10:30:11.8 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1743843","post_type_id":"1","score":"1","tags":"haskell","view_count":"63"} -{"id":"44665607","title":"Kinesis writing to ElasticSearch and S3","body":"\u003cp\u003eI am using AWS Kinesis to write to Elastic Search and taking S3 as the backup. So, it is writing to both the sources. But I observed one problem that it does not push to S3 at same time as when it pushes to Elastic Search. So, does it do periodically or something like that? Any explanation if anyone could give would be appreciated. Also, if this is the case, is there any way to change it?\u003c/p\u003e","accepted_answer_id":"44711574","answer_count":"1","comment_count":"2","creation_date":"2017-06-21 01:50:26.29 UTC","last_activity_date":"2017-06-23 01:10:51.32 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1523785","post_type_id":"1","score":"0","tags":"amazon-web-services|elasticsearch|amazon-s3|amazon-kinesis","view_count":"44"} +{"id":"44665607","title":"Kinesis writing to OpenSearch and S3","body":"\u003cp\u003eI am using AWS Kinesis to write to opensearch Search and taking S3 as the backup. So, it is writing to both the sources. But I observed one problem that it does not push to S3 at same time as when it pushes to opensearch Search. So, does it do periodically or something like that? Any explanation if anyone could give would be appreciated. 
Also, if this is the case, is there any way to change it?\u003c/p\u003e","accepted_answer_id":"44711574","answer_count":"1","comment_count":"2","creation_date":"2017-06-21 01:50:26.29 UTC","last_activity_date":"2017-06-23 01:10:51.32 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1523785","post_type_id":"1","score":"0","tags":"amazon-web-services|opensearch|amazon-s3|amazon-kinesis","view_count":"44"} {"id":"32222266","title":"Regex not returning any matches for UPS tracking numbers","body":"\u003cp\u003eI have a string \u003ccode\u003etrackingNumber=\"1Z96Y3W80340983689\"\u003c/code\u003e \nfor which I want to test a regex pattern against;\nwith the regular expression: \u003ccode\u003e\"/1Z\\\\?\\[0-9A-Z]\\\\{3}\\\\?\\[0-9A-Z]\\\\{3}\\\\?\\[0-9A-Z]\\\\{2}\\\\?\\[0-9A-Z]\\\\{4}\\\\?[0-9A-Z]{3}\\\\?\\[0-9A-Z]|\\[\\dT]\\\\d\\\\d\\\\d\\\\?\\\\d\\\\d\\\\d\\\\d\\\\?\\\\d\\\\d\\\\d/i\"\u003c/code\u003e\nin java\u003c/p\u003e\n\n\u003cp\u003eBut I'm not getting any matches for my regex.\u003c/p\u003e","answer_count":"2","comment_count":"5","creation_date":"2015-08-26 09:02:48.717 UTC","last_activity_date":"2015-08-26 10:04:29.787 UTC","last_edit_date":"2015-08-26 10:04:29.787 UTC","last_editor_display_name":"","last_editor_user_id":"642572","owner_display_name":"","owner_user_id":"3153599","post_type_id":"1","score":"-3","tags":"java|regex","view_count":"453"} {"id":"44138828","title":"XMLHttpRequest - Parsing out attributes - JS","body":"\u003cp\u003eFirst off, this is a complete newbie question. I don't really have much idea of what I'm doing.\u003c/p\u003e\n\n\u003cp\u003eI have an API that returns the top 10 fund raisers from JustGiving. I can get the XML info to display, however it just dumps everything out all together. 
This is what JS I have so far:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar xhr = new XMLHttpRequest();\nxhr.open(\"GET\", \"https://api.justgiving.com/{appid}/v1/event/{eventID}/leaderboard?currency=gbp/\", true);\nxhr.responseJSON;\nxhr.send();\n\nxhr.onreadystatechange = processRequest;\n\nfunction processRequest(e) {\n if (xhr.readyState == 4 \u0026amp;\u0026amp; xhr.status == 200) {\n document.write(xhr.responseText);\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI have been look for hours at different ways to get this information output into something I can manipulate on a web page. Something that can be wrapped in a div.\u003c/p\u003e\n\n\u003cp\u003ePretty sure its this I need to modify...\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edocument.write(xhr.responseText);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ePlease help or point me in the right direction. Or if I've gone completely in the wrong direction let me know. There is probably already a solution out there, but as my knowledge is very limited I'm probably wording all my searches wrong.\u003c/p\u003e\n\n\u003cp\u003eThe documentation for the API is \u003ca href=\"https://api.justgiving.com/docs/resources/v1/Leaderboard/GetEventLeaderboard\" rel=\"nofollow noreferrer\"\u003ehttps://api.justgiving.com/docs/resources/v1/Leaderboard/GetEventLeaderboard\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eMany thanks in advance.\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2017-05-23 15:16:34.437 UTC","last_activity_date":"2017-05-23 15:33:36.727 UTC","last_edit_date":"2017-05-23 15:31:12.96 UTC","last_editor_display_name":"","last_editor_user_id":"2240261","owner_display_name":"","owner_user_id":"2240261","post_type_id":"1","score":"1","tags":"javascript|xmlhttprequest","view_count":"33"} {"id":"29689964","title":"How to Censor a SPECIFIC word in Java using Regex","body":"\u003cp\u003eI'd like to know how to censor the word \"ass\" (or A word) using a Java 
Regex highly safe. \u003c/p\u003e\n\n\u003cp\u003eThis makes things difficult as the A word can be contained in a lot of other harmless words. For example, \"grass.\"\u003c/p\u003e\n\n\u003cp\u003eI have setup partially the beginning part for a lot of the prefixes of the A word, but can't seem to find how to censor the word without censoring suffixes like \"assassin.\"\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eString string = String.replaceAll(\"^(?!(b|B|gr|gR|Gr|GR|gl|gL|Gl|GL|m|M|s|S|h|H|p|P|g|G)).*[aA4]+[\\\\W[_]]*?[$5SszZ]+[\\\\W[_]]*?[$5SszZ]+\", \"***\");\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis I find is very hard, and still can not find a solution yet.\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2015-04-17 03:52:56.297 UTC","favorite_count":"1","last_activity_date":"2015-04-23 03:22:05.637 UTC","last_edit_date":"2015-04-17 05:45:40.923 UTC","last_editor_display_name":"","last_editor_user_id":"4377568","owner_display_name":"","owner_user_id":"2954625","post_type_id":"1","score":"0","tags":"java|regex|profanity","view_count":"418"} @@ -3033,7 +3033,7 @@ {"id":"10240980","title":"NullPointerException VideoView and MediaController on Fragment in android","body":"\u003cp\u003eI am developing Galaxy Tab 10.1 app by using honeycomb 3.1 and i have a videoview and mediacontroller in fragment on the right side. 
I defined VideoView and MediaController in Layout xml file and instantiate and manipulate them in the related java file.\u003c/p\u003e\n\n\u003cp\u003eAs you guys konw, in the java file, i set VideoView's controller to MediaController and set MediaController's media player to VideoView, i defined.\u003c/p\u003e\n\n\u003cp\u003eBelow is fragment layout xml file\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;LinearLayout\n android:layout_width=\"match_parent\"\n android:layout_height=\"match_parent\"\n android:orientation=\"vertical\" \u0026gt;\n\n \u0026lt;VideoView\n android:id=\"@+id/video\"\n android:layout_width=\"match_parent\"\n android:layout_height=\"match_parent\" /\u0026gt;\n\n \u0026lt;MediaController\n android:id=\"@+id/controller\"\n android:layout_width=\"match_parent\"\n android:layout_height=\"wrap_content\"\n android:layout_gravity=\"center_horizontal\" /\u0026gt;\n\u0026lt;/LinearLayout\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand java code file is below\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class ContentFragment extends Fragment {\n private VideoView mVideo;\n private MediaController mController;\n\n @Override\n public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {\n View view = inflater.inflate(R.layout.content, null);\n mVideo = (VideoView)view.findViewById(R.id.video);\n mController = (MediaController)view.findViewById(R.id.controller);\n mVideo.setMediaController(mController);\n mController.setMediaPlayer(mVideo);\n\n return view;\n }\n\n public void playVideo(String path) {\n mVideo.setVideoPath(path);\n mVideo.requestFocus();\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut while running this app, there occurs \u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eNullPointerException with\n android.widget.MediaController.show(MediaController.java:305)\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eI tried to solve this error a 
whole day but i can't get the reason why. actullay there isn't enought information for this.\nDose any body know what i did wrong? or have solutions?\nPlease let me know.\u003c/p\u003e\n\n\u003cp\u003eThanks.\u003c/p\u003e","accepted_answer_id":"10241331","answer_count":"2","comment_count":"0","creation_date":"2012-04-20 05:53:25.987 UTC","last_activity_date":"2016-03-25 13:14:34.53 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1343302","post_type_id":"1","score":"0","tags":"android|nullpointerexception|show|fragment|mediacontroller","view_count":"3452"} {"id":"37290659","title":"Angular ng-style on body tag for different page backgrounds?","body":"\u003cp\u003eI am new to angular and ngroute and am trying to use ng-style to have a different image background for each page of a website. Currently it sets the background of all the site's pages, even when I have different controller scope image urls.\u003c/p\u003e\n\n\u003cp\u003eMy html is like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;body ng-controller=\"mainController\" ng-style=\"bodyStyles\"\u0026gt;\n...\n \u0026lt;div id=\"main\"\u0026gt;\n \u0026lt;div ng-view\u0026gt;\u0026lt;/div\u0026gt;\n\u0026lt;/body\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMy script:\n var angVenture = angular.module('angVenture', ['ngRoute']);\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// configure routes\nangVenture.config(function($routeProvider, $locationProvider) {\n $routeProvider\n\n // route for the index page\n .when('/home', {\n templateUrl : 'pages/home.html',\n controller : 'mainController'\n })\n\n // route for the about page\n .when('/about', {\n templateUrl : 'pages/about.html',\n controller : 'aboutController'\n })\n\n ... more routes..... 
\n\n// create the controller\nangVenture.controller('mainController', function($scope) {\n // create a message to display in our view\n $scope.message = 'home page';\n $scope.bodyStyles ={\n \"background\": \"url(../images/someimage.jpg) no-repeat center center fixed\", \n \"-webkit-background-size\": \"cover\",\n \"-moz-background-size\": \"cover\",\n \"-o-background-size\": \"cover\",\n \"background-size\": \"cover\"\n }\n});\n\nangVenture.controller('aboutController', function($scope) {\n $scope.message = 'another page.';\n});\n\n....more controllers for different pages...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWould I be better off going about doing this with ui-router?\u003c/p\u003e","accepted_answer_id":"37290712","answer_count":"2","comment_count":"0","creation_date":"2016-05-18 05:00:49.377 UTC","last_activity_date":"2016-05-18 05:15:06.71 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4810599","post_type_id":"1","score":"1","tags":"javascript|css|angularjs|ngroute|ng-style","view_count":"563"} {"id":"3874677","title":"The number of processes a user is running using bash","body":"\u003cp\u003eI would like to know how I could get the number of processes for each user that is currently logged in.\u003c/p\u003e","accepted_answer_id":"3875588","answer_count":"8","comment_count":"0","creation_date":"2010-10-06 16:19:15.433 UTC","favorite_count":"1","last_activity_date":"2017-04-07 17:24:40.43 UTC","last_edit_date":"2016-10-15 19:37:55.217 UTC","last_editor_display_name":"","last_editor_user_id":"212378","owner_display_name":"","owner_user_id":"438171","post_type_id":"1","score":"8","tags":"linux|bash|scripting","view_count":"18671"} -{"id":"42715276","title":"elastic search aggregate doesn't return data","body":"\u003cp\u003eIt possible that aggregate function returns data instead of count?\u003c/p\u003e\n\n\u003cp\u003eRight now I get:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003earray 
(size=3)\n'doc_count_error_upper_bound' =\u0026gt; int 0\n'sum_other_doc_count' =\u0026gt; int 0\n'buckets' =\u0026gt; \narray (size=2)\n 0 =\u0026gt; \n array (size=2)\n 'key' =\u0026gt; int 15\n 'doc_count' =\u0026gt; int 2\n 1 =\u0026gt; \n array (size=2)\n 'key' =\u0026gt; int 14\n 'doc_count' =\u0026gt; int 1\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut this is useless cause I need the actual data that represents this doc_count of 2 and doc_count of 1\u003c/p\u003e\n\n\u003cp\u003eI'm using elastica.io\u003c/p\u003e","accepted_answer_id":"42716990","answer_count":"1","comment_count":"0","creation_date":"2017-03-10 09:51:33.833 UTC","last_activity_date":"2017-03-10 11:45:34.57 UTC","last_edit_date":"2017-03-10 10:34:35.667 UTC","last_editor_display_name":"","last_editor_user_id":"5759047","owner_display_name":"","owner_user_id":"5759047","post_type_id":"1","score":"0","tags":"php|elasticsearch|elastica","view_count":"39"} +{"id":"42715276","title":"opensearch search aggregate doesn't return data","body":"\u003cp\u003eIt possible that aggregate function returns data instead of count?\u003c/p\u003e\n\n\u003cp\u003eRight now I get:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003earray (size=3)\n'doc_count_error_upper_bound' =\u0026gt; int 0\n'sum_other_doc_count' =\u0026gt; int 0\n'buckets' =\u0026gt; \narray (size=2)\n 0 =\u0026gt; \n array (size=2)\n 'key' =\u0026gt; int 15\n 'doc_count' =\u0026gt; int 2\n 1 =\u0026gt; \n array (size=2)\n 'key' =\u0026gt; int 14\n 'doc_count' =\u0026gt; int 1\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut this is useless cause I need the actual data that represents this doc_count of 2 and doc_count of 1\u003c/p\u003e\n\n\u003cp\u003eI'm using opensearcha.io\u003c/p\u003e","accepted_answer_id":"42716990","answer_count":"1","comment_count":"0","creation_date":"2017-03-10 09:51:33.833 UTC","last_activity_date":"2017-03-10 11:45:34.57 UTC","last_edit_date":"2017-03-10 10:34:35.667 
UTC","last_editor_display_name":"","last_editor_user_id":"5759047","owner_display_name":"","owner_user_id":"5759047","post_type_id":"1","score":"0","tags":"php|opensearch|opensearcha","view_count":"39"} {"id":"44647414","title":"Facebook Graph API Upgrade","body":"\u003cp\u003eI'm working on website that has an old facebook API implementation with Facebook SKD for PHP. I'm not able to update SDK to newer version since it requires PHP 5.4, this project works under 5.3 so I'm forced to use old one.\nI have access to API upgrade tool and it shows me a lot of methods that I should fix, for example GET /posts\nAssume I want upgrade to v2.4 and I use \u003ca href=\"https://developers.facebook.com/docs/graph-api/reference/v2.4/page/feed\" rel=\"nofollow noreferrer\"\u003elink\u003c/a\u003e to find out what changed in that request. What I found:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003e\u003cstrong\u003eDeprecated Fields\u003c/strong\u003e\u003c/p\u003e\n \n \u003cp\u003eAs of April 18, 2017, the following parameters are no longer supported\n by Graph API versions 2.9 and higher. For versions 2.8 and lower, the\n parameters will continue working until July 17, 2017.\u003c/p\u003e\n \n \u003cp\u003eThe \u003cstrong\u003elink\u003c/strong\u003e field is still supported, but its sub-fields have been\n deprecated. 
\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eI feel myself so stupid but I can't realize \u003cstrong\u003ewhat should I use instead of \u003cem\u003elink\u003c/em\u003e field after it will be deprecated on 17th July\u003c/strong\u003e?\u003c/p\u003e","accepted_answer_id":"44648020","answer_count":"1","comment_count":"3","creation_date":"2017-06-20 08:10:54.43 UTC","last_activity_date":"2017-06-20 08:41:33.04 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"8186819","post_type_id":"1","score":"0","tags":"facebook|api|upgrade","view_count":"93"} {"id":"16977098","title":"quartz threw an unhandled Exception: : java.lang.NullPointerException with JSF+EJB","body":"\u003cp\u003eI am using JSF and EJB in my project.\u003c/p\u003e\n\n\u003cp\u003eI have one functionality where i need to send the sms to some people for every 1 hour.\u003c/p\u003e\n\n\u003cp\u003eFor that i am getting the information(some persons) from the database to whom i need to send.\u003c/p\u003e\n\n\u003cp\u003eWhile retrieving from the database it is throwing the following exceptions.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e09:51:29,640 ERROR [org.quartz.core.JobRunShell] (DefaultQuartzScheduler_Worker-2) Job group1.job1 threw an unhandled Exception: : java.lang.NullPointerException\nat hms.general.SendSMSJob.execute(SendSMSJob.java:43) [classes:]\nat org.quartz.core.JobRunShell.run(JobRunShell.java:213) [quartz-all-2.1.7.jar:]\nat org.quartz.simpl.SimpleThreadPool$WorkerThread.run(SimpleThreadPool.java:557) [quartz-all-2.1.7.jar:]\n\n09:51:29,640 ERROR [org.quartz.core.ErrorLogger] (DefaultQuartzScheduler_Worker-2) Job (group1.job1 threw an exception.: org.quartz.SchedulerException: Job threw an unhandled exception. 
[See nested exception: java.lang.NullPointerException]\nat org.quartz.core.JobRunShell.run(JobRunShell.java:224) [quartz-all-2.1.7.jar:]\nat org.quartz.simpl.SimpleThreadPool$WorkerThread.run(SimpleThreadPool.java:557) [quartz-all-2.1.7.jar:]\nCaused by: java.lang.NullPointerException\nat hms.general.SendSMSJob.execute(SendSMSJob.java:43) [classes:]\nat org.quartz.core.JobRunShell.run(JobRunShell.java:213) [quartz-all-2.1.7.jar:]\n... 1 more\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe following code is for scheduler\n package hms.general;\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport javax.faces.bean.ApplicationScoped;\nimport javax.faces.bean.ManagedBean;\n\nimport org.quartz.JobDetail;\nimport org.quartz.Scheduler;\nimport org.quartz.SchedulerException;\nimport org.quartz.Trigger;\nimport org.quartz.impl.StdSchedulerFactory;\n\n\n@ManagedBean(eager=true)\n@ApplicationScoped\npublic class ScheduleBean {\npublic ScheduleBean() {\n try{\n Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();\n scheduler.start();\n\n JobDetail job = org.quartz.JobBuilder.newJob(SendSMSJob.class).withIdentity(\"job1\",\"group1\").build();\n Trigger trigger = org.quartz.TriggerBuilder.newTrigger().withIdentity(\"trigger1\",\"group1\").startNow().withSchedule(org.quartz.SimpleScheduleBuilder.simpleSchedule().withIntervalInHours(1).repeatForever()).build();\n scheduler.scheduleJob(job,trigger);\n\n }\n catch(SchedulerException se){\n se.getMessage();\n }\n}\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eCode for JobScheduler\n import hms.db.PatientRegEMRemote;\n import hms.db.Prescription;\n import hms.db.PrescriptionEMRemote;\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport java.util.List;\n\n\nimport javax.ejb.EJB;\n\nimport org.quartz.Job;\nimport org.quartz.JobExecutionContext;\nimport org.quartz.JobExecutionException;\n\npublic class SendSMSJob implements Job {\n\n@EJB(mappedName = 
\"java:global/Hms-Main/PatientRegEM!hms.db.PatientRegEMRemote\")\npublic PatientRegEMRemote patreg_r;\n\n@EJB(mappedName = \"java:global/Hms-Main/PrescriptionEM!hms.db.PrescriptionEMRemote\")\nprivate PrescriptionEMRemote prescription_r;\n\npublic static void main(String ar[]){\n SendSMSJob hj = new SendSMSJob();\n try {\n hj.execute(null);\n } catch (JobExecutionException e) {\n // TODO Auto-generated catch block\n e.printStackTrace();\n }\n}\n@Override\npublic void execute(JobExecutionContext arg0) throws JobExecutionException {\n System.out.println(arg0.getJobDetail());\n System.out.println(\"I am in execute method....\");\n // TODO Auto-generated method stub\n System.out.println(\"Im inside execute method\");\n String s=\"select p from prescription p where p.smsflag=1\";\n System.out.println(s);\n List\u0026lt;Prescription\u0026gt; pre=prescription_r.retrieveAll(s);\n System.out.println(\".........\");\n for (Prescription p : pre) {\n System.out.println(p.getAppointno());\n }\n}\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"0","creation_date":"2013-06-07 05:51:27.36 UTC","last_activity_date":"2013-06-07 06:23:42.46 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2314868","post_type_id":"1","score":"1","tags":"jsf-2|ejb|quartz-scheduler","view_count":"3111"} {"id":"7662408","title":"Instantiating a generic extension of an abstract class","body":"\u003cp\u003eTrying to write some generalised code for Genetic Algorithms and I have an abstract class Genotype as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic abstract class Genotype {\nprivate ArrayList\u0026lt;Gene\u0026gt; genotype = new ArrayList\u0026lt;Gene\u0026gt;();\n\n//...\n\npublic Genotype(ArrayList\u0026lt;Gene\u0026gt; genotype) {\n setGenotype(genotype);\n setGenotypeLength(genotype.size());\n}\n\npublic abstract Phenotype\u0026lt;Gene\u0026gt; getPhenotype();\n\npublic abstract void 
mutate();\n\n//...\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis class is intended to be extended and the subclass obviously provides the implementation of getPhenotype() and mutate(). However, I also have a second class that takes two Genotype objects as parameters and returns an ArrayList containing Genotype objects. Since I don't know the type of the extended Genotype objects at this point I need to use a generic parameter as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class Reproducer {\n\n//...\n\n private \u0026lt;G extends Genotype\u0026gt; ArrayList\u0026lt;Genotype\u0026gt; crossover(G parent1, G parent2) {\n ArrayList\u0026lt;Genotype\u0026gt; children = new ArrayList\u0026lt;Genotype\u0026gt;();\n\n ArrayList\u0026lt;Gene\u0026gt; genotypeOne = ArrayListCloner.cloneArrayList(parent1.getGenotype());\n ArrayList\u0026lt;Gene\u0026gt; genotypeTwo = ArrayListCloner.cloneArrayList(parent2.getGenotype());\n\n //one point crossover\n int p = gen.nextInt(genotypeOne.size());\n\n for (int i = 0; i \u0026lt; p; i++) {\n genotypeOne.set(i, genotypeOne.get(i));\n genotypeTwo.set(i, genotypeTwo.get(i));\n }\n for (int i = p; i \u0026lt; 10; i++) {\n genotypeOne.set(i, genotypeTwo.get(i));\n genotypeTwo.set(i, genotypeOne.get(i));\n }\n\n children.add(new G(genotypeOne)); //THROWS ERROR: Cannot instantiate the type G\n children.add(new G(genotypeTwo)); //THROWS ERROR: Cannot instantiate the type G\n\n return children;\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever, since I need to return two objects of type G in an ArrayList I clearly have a problem where I can't instantiate the new Genotype objects because they're 1. generic types and presumably 2. abstract.\u003c/p\u003e\n\n\u003cp\u003eThis might be a bad way of going about things all together but if anyone has a solution that would be great. 
Thank you.\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2011-10-05 13:54:19.99 UTC","last_activity_date":"2011-10-25 16:37:12.99 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"536840","post_type_id":"1","score":"1","tags":"java|generics|parameters|abstract|instantiation","view_count":"1021"} @@ -3088,7 +3088,7 @@ {"id":"26556288","title":"texture2d rectangle XNA wont initialize","body":"\u003cp\u003eI have a rather basic Texture2D name rect and I am just trying to initialize it. It tells me that a field initializer cannot reference the non-static field, method or property \"graphics\"\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e public class Game1 : Microsoft.Xna.Framework.Game\n{\n GraphicsDeviceManager graphics;\n SpriteBatch spriteBatch;\n\n //my variables and stuff I delcare\n\n //texture we can render\n Texture2D myTexture;\n\n //set coords to draw the spirte\n Vector2 spritePos = new Vector2(300.0f, 330.0f);\n\n //some info about motion\n Vector2 spriteSpeed = new Vector2(0f, 0f);\n\n KeyboardState oldState;\n double boost = 15;\n\n //boost level rectange this is the issue below+\n Texture2D rect = new Texture2D(graphics.GraphicsDevice, 80, 30);\n\n public Game1()\n {\n graphics = new GraphicsDeviceManager(this);\n Content.RootDirectory = \"Content\";\n }\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"26556335","answer_count":"1","comment_count":"1","creation_date":"2014-10-24 21:07:06.167 UTC","last_activity_date":"2014-10-24 21:11:16.023 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3908256","post_type_id":"1","score":"0","tags":"c|xna","view_count":"49"} {"id":"14032208","title":"ToList slow performance vs foreach slow performance","body":"\u003cp\u003eI am building program that use DataBase with 3 tables(Worker, Task, TaskStep)\nand i have a method that get date and build report for specific worker\nof the task and there steps for the specific 
day.\u003c/p\u003e\n\n\u003cp\u003eThe data base structure is as follow:\u003c/p\u003e\n\n\u003cp\u003eMySQL 5.2\u003c/p\u003e\n\n\u003cp\u003e\u003ccode\u003eWorker\u003c/code\u003e table columns:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eworkerID(VARCHAR(45)),\nname(VARCHAR(45)),\nage(int),\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003ccode\u003eTasks\u003c/code\u003e table columns:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTaskID(VARCHAR(45)),\ndescription(VARCHAR(45)),\ndate(DATE),\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003ccode\u003eTaskSteps\u003c/code\u003e table columns:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTaskStepID(VARCHAR(45)),\ndescription(VARCHAR(45)),\ndate(DATE),\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNo indexing on any table\u003c/p\u003e\n\n\u003cp\u003eThe problem is thats it is very very slow!! (~ 20 seconds)\u003c/p\u003e\n\n\u003cp\u003eHere is the code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eusing WorkerDailyReport = Dictionary\u0026lt;task, IEnumerable\u0026lt;taskStep\u0026gt;\u0026gt;;\n\nprivate void Buildreport(DateTime date)\n{\n var report = new WorkerDailyReport(); \n\n // Load from DB\n var sw = new Stopwatch();\n sw.Start();\n\n var startOfDay = date.Date;\n var endOfDay = startOfDay.AddDays(1);\n var db = new WorkEntities();\n\n const string workerID = \"80900855\";\n\n IEnumerable\u0026lt;task\u0026gt; _tasks = db.task\n .Where(ta =\u0026gt; ta.date \u0026gt;= startOfDay \u0026amp;\u0026amp;\n ta.date \u0026lt; endOfDay \u0026amp;\u0026amp;\n ta.workerID == workerID)\n .ToList();\n\n sw.Stop();\n Console.WriteLine(\"Load From DB time - \" + sw.Elapsed + \n \", Count - \" + _tasks.Count()); \n\n // Build the report\n sw.Restart();\n\n foreach (var t in _tasks)\n {\n var ts = db.taskStep.Where(s =\u0026gt; s.taskID == task.taskID);\n\n report.Add(t, ts);\n }\n\n sw.Stop();\n Console.WriteLine(\"Build report time - \" + sw.Elapsed);\n\n // Do somthing 
with the report\n foreach (var t in report)\n {\n sw.Restart();\n\n foreach (var subNode in t.Value)\n {\n // Do somthing..\n }\n\n Console.WriteLine(\"Do somthing time - \" + sw.Elapsed + \n \", Count - \" + t.Value.Count());\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAs u can see i put StopWatch in each part to check what take so long\nand this is the results:\u003c/p\u003e\n\n\u003cp\u003e1)\u003c/p\u003e\n\n\u003cp\u003eIf i run the code as above:\u003c/p\u003e\n\n\u003cp\u003eConsole:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eLoad From DB time - 00:00:00.0013774, Count - 577\n\nBuild report time - 00:00:03.6305722\n\nDo somthing time - 00:00:07.7573754, Count - 21\n\nDo somthing time - 00:00:08.2811928, Count - 11\n\nDo somthing time - 00:00:07.8715531, Count - 14\n\nDo somthing time - 00:00:08.0430597, Count - 0\n\nDo somthing time - 00:00:07.7867790, Count - 9\n\nDo somthing time - 00:00:07.3485209, Count - 39\n\n.........\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe inner foreach run takes about 7-9!! 
Sec to run over no more then\n40 record.\u003c/p\u003e\n\n\u003cp\u003e2)\u003c/p\u003e\n\n\u003cp\u003eIf i change only one thing, Add .ToList() after the first query\nwhen i load the worker tasks from the Data Base it changes\neverithing.\u003c/p\u003e\n\n\u003cp\u003eConsole:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eLoad From DB time - 00:00:04.3568445, Count - 577\n\nBuild report time - 00:00:00.0018535\n\nDo somthing time - 00:00:00.0191099, Count - 21\n\nDo somthing time - 00:00:00.0144895, Count - 11\n\nDo somthing time - 00:00:00.0150208, Count - 14\n\nDo somthing time - 00:00:00.0179021, Count - 0\n\nDo somthing time - 00:00:00.0151372, Count - 9\n\nDo somthing time - 00:00:00.0155703, Count - 39\n\n.........\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNow the load from DataBase takes lot more time, 4+ sec.\nBut the Built report time is about ~1ms \nAnd each inner foreach takes ~10ms\u003c/p\u003e\n\n\u003cp\u003eThe first way is imposible(577 * ~8 seconds) and the seconde option \nis also very slow and i cant see y.\u003c/p\u003e\n\n\u003cp\u003eAny idea what happening here? 
\u003c/p\u003e\n\n\u003cp\u003e1) Why the \u003ccode\u003eToList()\u003c/code\u003e so slow ?\u003c/p\u003e\n\n\u003cp\u003e2) Why without the \u003ccode\u003eToList()\u003c/code\u003e, The inner \u003ccode\u003eforeach\u003c/code\u003e and the Build report is slowing?\u003c/p\u003e\n\n\u003cp\u003eHow can i make it faster?\u003c/p\u003e\n\n\u003cp\u003ethnx.\u003c/p\u003e","answer_count":"3","comment_count":"4","creation_date":"2012-12-25 16:12:49.307 UTC","last_activity_date":"2013-12-19 07:07:53.4 UTC","last_edit_date":"2013-12-19 07:07:53.4 UTC","last_editor_display_name":"","last_editor_user_id":"842218","owner_display_name":"","owner_user_id":"1365625","post_type_id":"1","score":"2","tags":"performance|linq|c#-4.0|entity-framework-4|tolist","view_count":"3396"} {"id":"16063518","title":"What does this statement mean in C#?","body":"\u003cp\u003eWhat does \u003ccode\u003eif ((a \u0026amp; b) == b)\u003c/code\u003e mean in the following code block?\u003c/p\u003e\n\n\u003cpre class=\"lang-cs prettyprint-override\"\u003e\u003ccode\u003eif ((e.Modifiers \u0026amp; Keys.Shift) == Keys.Shift)\n{\n lbl.Text += \"\\n\" + \"Shift was held down.\";\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhy is it not like this?\u003c/p\u003e\n\n\u003cpre class=\"lang-cs prettyprint-override\"\u003e\u003ccode\u003eif (e.Modifiers == Keys.Shift)\n{\n lbl.Text += \"\\n\" + \"Shift was held down.\";\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"16063748","answer_count":"8","comment_count":"2","creation_date":"2013-04-17 15:00:46.73 UTC","last_activity_date":"2013-04-18 15:03:41.293 UTC","last_edit_date":"2013-04-17 17:16:11.383 UTC","last_editor_display_name":"","last_editor_user_id":"682480","owner_display_name":"","owner_user_id":"2284963","post_type_id":"1","score":"11","tags":"c#|if-statement","view_count":"603"} -{"id":"35393765","title":"Convert log message timestamp to UTC before sroring it in Elasticsearch","body":"\u003cp\u003eI am collecting and 
parsing Tomcat access-log messages using Logstash, and am storing the parsed messages in Elasticsearch.\nI am using Kibana to display the log messges in Elasticsearch.\nCurrently I am using Elasticsearch 2.0.0, Logstash 2.0.0, and Kibana 4.2.1.\u003c/p\u003e\n\n\u003cp\u003eAn access-log line looks something like the following:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e02-08-2016 19:49:30.669 ip=11.22.333.444 status=200 tenant=908663983 user=0a4ac75477ed42cfb37dbc4e3f51b4d2 correlationId=RID-54082b02-4955-4ce9-866a-a92058297d81 request=\"GET /pwa/rest/908663983/rms/SampleDataDeployment HTTP/1.1\" userType=Apache-HttpClient requestInfo=- duration=4 bytes=2548 thread=http-nio-8080-exec-5 service=rms itemType=SampleDataDeployment itemOperation=READ dataLayer=MongoDB incomingItemCnt=0 outgoingItemCnt=7 \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe time displayed in the log file (ex. 02-08-2016 19:49:30.669) is in local time (not UTC!)\u003c/p\u003e\n\n\u003cp\u003eHere is how I parse the message line:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efilter {\n\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n kv {}\n\n mutate {\n convert =\u0026gt; { \"duration\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"bytes\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"status\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"incomingItemCnt\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"outgoingItemCnt\" =\u0026gt; \"integer\" }\n\n gsub =\u0026gt; [ \"message\", \"\\r\", \"\" ]\n }\n\n grok {\n match =\u0026gt; { \"request\" =\u0026gt; [ \"(?:%{WORD:method} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpVersion})?)\" ] }\n overwrite =\u0026gt; [ \"request\" ] \n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI would like Logstash to convert the time read from the log message ('logTimestamp' field) into UTC before storing it in Elasticsearch.\u003c/p\u003e\n\n\u003cp\u003eCan someone assist me 
with that please?\u003c/p\u003e\n\n\u003cp\u003e--\u003c/p\u003e\n\n\u003cp\u003eI have added the \u003cem\u003edate\u003c/em\u003e filter to my processing, but I had to add a \u003cem\u003etimezone\u003c/em\u003e.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e filter {\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n date {\n match =\u0026gt; [ \"logTimestamp\" , \"mm-dd-yyyy HH:mm:ss.SSS\" ]\n timezone =\u0026gt; \"Asia/Jerusalem\"\n target =\u0026gt; \"logTimestamp\"\n }\n\n ...\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to convert the date to UTC without supplying the local timezone, such that Logstash takes the timezone of the machine it is running on?\u003c/p\u003e\n\n\u003cp\u003eThe motivation behind this question is I would like to use the same configuration file in all my deployments, in various timezones.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-02-14 15:56:59.2 UTC","favorite_count":"0","last_activity_date":"2016-02-15 10:55:52.107 UTC","last_edit_date":"2016-02-15 10:55:52.107 UTC","last_editor_display_name":"","last_editor_user_id":"5524030","owner_display_name":"","owner_user_id":"5524030","post_type_id":"1","score":"1","tags":"elasticsearch|logstash|utc","view_count":"1032"} +{"id":"35393765","title":"Convert log message timestamp to UTC before sroring it in OpenSearch","body":"\u003cp\u003eI am collecting and parsing Tomcat access-log messages using Logstash, and am storing the parsed messages in OpenSearch.\nI am using OpenSearch Dashboards to display the log messges in OpenSearch.\nCurrently I am using OpenSearch2.0.0, Logstash 2.0.0, and OpenSearch Dashboards 4.2.1.\u003c/p\u003e\n\n\u003cp\u003eAn access-log line looks something like the following:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e02-08-2016 19:49:30.669 ip=11.22.333.444 status=200 tenant=908663983 user=0a4ac75477ed42cfb37dbc4e3f51b4d2 
correlationId=RID-54082b02-4955-4ce9-866a-a92058297d81 request=\"GET /pwa/rest/908663983/rms/SampleDataDeployment HTTP/1.1\" userType=Apache-HttpClient requestInfo=- duration=4 bytes=2548 thread=http-nio-8080-exec-5 service=rms itemType=SampleDataDeployment itemOperation=READ dataLayer=MongoDB incomingItemCnt=0 outgoingItemCnt=7 \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe time displayed in the log file (ex. 02-08-2016 19:49:30.669) is in local time (not UTC!)\u003c/p\u003e\n\n\u003cp\u003eHere is how I parse the message line:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efilter {\n\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n kv {}\n\n mutate {\n convert =\u0026gt; { \"duration\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"bytes\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"status\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"incomingItemCnt\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"outgoingItemCnt\" =\u0026gt; \"integer\" }\n\n gsub =\u0026gt; [ \"message\", \"\\r\", \"\" ]\n }\n\n grok {\n match =\u0026gt; { \"request\" =\u0026gt; [ \"(?:%{WORD:method} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpVersion})?)\" ] }\n overwrite =\u0026gt; [ \"request\" ] \n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI would like Logstash to convert the time read from the log message ('logTimestamp' field) into UTC before storing it in OpenSearch.\u003c/p\u003e\n\n\u003cp\u003eCan someone assist me with that please?\u003c/p\u003e\n\n\u003cp\u003e--\u003c/p\u003e\n\n\u003cp\u003eI have added the \u003cem\u003edate\u003c/em\u003e filter to my processing, but I had to add a \u003cem\u003etimezone\u003c/em\u003e.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e filter {\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n date {\n match =\u0026gt; [ \"logTimestamp\" , \"mm-dd-yyyy HH:mm:ss.SSS\" ]\n timezone =\u0026gt; \"Asia/Jerusalem\"\n 
target =\u0026gt; \"logTimestamp\"\n }\n\n ...\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to convert the date to UTC without supplying the local timezone, such that Logstash takes the timezone of the machine it is running on?\u003c/p\u003e\n\n\u003cp\u003eThe motivation behind this question is I would like to use the same configuration file in all my deployments, in various timezones.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-02-14 15:56:59.2 UTC","favorite_count":"0","last_activity_date":"2016-02-15 10:55:52.107 UTC","last_edit_date":"2016-02-15 10:55:52.107 UTC","last_editor_display_name":"","last_editor_user_id":"5524030","owner_display_name":"","owner_user_id":"5524030","post_type_id":"1","score":"1","tags":"opensearch|logstash|utc","view_count":"1032"} {"id":"3173899","title":"Toggle element visibility via radio select","body":"\u003cp\u003eThis form has a hidden textara and a visible textbox. I would like to swap visibility of these elements if option \"D:\" is selected, but not sure how to correctly check which radio button is checked at any given time: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;script language=\"JavaScript\" type=\"text/javascript\"\u0026gt;\n\nfunction unhide(event) { \n event = event || window.event ;\n target = event.target || event.srcElement; \n if(target.value === \"D:\") {\n if(target.checked) {\n document.getElementByName('tarea').style.display=''; \n document.getElementByName('tbox').style.display='none'; \n }\n }else {\n if(target.checked) {\n document.getElementByName('tarea').style.display='none'; \n document.getElementByName('tbox').style.display=''; \n }\n } \n}\n\u0026lt;/script\u0026gt;\n\u0026lt;/head\u0026gt;\n\u0026lt;body\u0026gt;\n\u0026lt;form method=\"get\" action=\"/cgi-bin/form.cgi\" enctype=\"application/x-www-form-urlencoded\"\u0026gt;\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"A:\" onclick=\"unhide(event)\" 
/\u0026gt;A:\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"B:\" onclick=\"unhide(event)\" /\u0026gt;B:\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"C:\" checked=\"checked\" onclick=\"unhide(event)\" /\u0026gt;C:\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"D:\" onclick=\"unhide(event)\" /\u0026gt;D:\n\u0026lt;br\u0026gt;\u0026lt;input type=\"tbox\" name=\"event\" /\u0026gt;\n\u0026lt;br\u0026gt;\u0026lt;textarea name=\"tarea\" rows=\"8\" cols=\"80\" style=\"width:580;height:130;display:none;\"\u0026gt;\u0026lt;/textarea\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"3174156","answer_count":"4","comment_count":"0","creation_date":"2010-07-04 06:10:30.503 UTC","last_activity_date":"2010-07-04 08:20:04.6 UTC","last_edit_date":"2010-07-04 07:36:35.403 UTC","last_editor_display_name":"","last_editor_user_id":"126562","owner_display_name":"","owner_user_id":"196096","post_type_id":"1","score":"0","tags":"javascript|onclick","view_count":"1224"} {"id":"47281777","title":"How to redirect all traffics to HTTPS only EXCEPT mobile and subdomains?","body":"\u003cp\u003eThe .htaccess file that I am using now is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e#Force www:\nRewriteEngine On\n\nRewriteCond %{HTTP_USER_AGENT} \"!(android|blackberry|googlebot-mobile|iemobile|ipad|iphone|ipod|opera mobile|palmos|webos)\" [NC]\nRewriteCond %{HTTP_HOST} ^example\\.com [NC]\nRewriteRule ^$ http://www.example.com/ [L,R=302]\n\n\nRewriteCond %{HTTPS} off [OR]\nRewriteCond %{HTTP_HOST} !^www\\. 
[NC]\nRewriteCond %{HTTP_HOST} ^(?:www\\.)?(.+)$ [NC]\nRewriteRule ^ https://www.%1%{REQUEST_URI} [L,NE,R=301]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever, when I tested using my mobile I found that it is redirecting to the https only and also while accessing any subdomains it redirects to the https.\u003c/p\u003e\n\n\u003cp\u003eAm I doing something wrong in my htaccess script?\u003c/p\u003e\n\n\u003cp\u003eThanks in advance.\u003c/p\u003e","answer_count":"0","comment_count":"1","creation_date":"2017-11-14 09:15:56.563 UTC","last_activity_date":"2017-11-14 09:15:56.563 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3243499","post_type_id":"1","score":"0","tags":"apache|.htaccess|redirect|mod-rewrite","view_count":"16"} {"id":"44615289","title":"Netty ByteBuf processing, decoders structure in the pipeline","body":"\u003cp\u003eMy server sends response to the client or forward the message to another client depends on message content.\nI need to use 8 bytes messages: 6 encrypted bytes between braces, for example: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e0x3C 0xE1 0xE2 0xE3 0xE04 0xE5 0xE6 0x3E\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhere 0x3C means \u0026lt; sign as an opening frame marker, and 0x3E means \u003e sign as closing frame marker. \u003c/p\u003e\n\n\u003cp\u003eIf internal 6 encrypted bytes (0xE1 0x02 0x03 0x04 0x05 0x06) are decrypted successfully, data contains same markers again:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e0x3C 0x3C 0x02 0x03 0x04 0x05 0x3E 0x3E\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo I get 4 bytes payload (0x02 0x03 0x04 0x05).\u003c/p\u003e\n\n\u003cp\u003eI have already written a FrameDecoder, but now I can't decide to strip the braces bytes or not:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eI want to write clean code, braces are only frame markers so they belong to FrameDecoder responsibility. This means for me FrameDecoder needs to strip them. 
But on forwarding, FrameEncoder needs to add them again (on reponse encoding too). I can simply write the closing marker into the buffer but I don't know how can I write single byte to the beginning of Bytebuf efficiently.\u003c/li\u003e\n\u003cli\u003eIf I do not strip markers, it looks not so clean solution, but I can forward the entire received Bytebuf (after encryption) or last handler can allocate 8 bytes for the entire Bytebuf on reponse sending.\u003c/li\u003e\n\u003c/ul\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-06-18 13:02:58.413 UTC","last_activity_date":"2017-06-20 07:23:34.603 UTC","last_edit_date":"2017-06-18 13:19:12.1 UTC","last_editor_display_name":"","last_editor_user_id":"7973330","owner_display_name":"","owner_user_id":"7973330","post_type_id":"1","score":"0","tags":"java|netty","view_count":"49"} @@ -3178,7 +3178,7 @@ {"id":"15118380","title":"Image not changing in android app","body":"\u003cp\u003eI'm trying to change an image resource with .setImageResource(identifier) but it is not showing up when i'm using the variables i'm using right now. 
It will work when i fill in the name of the image by myself.\u003c/p\u003e\n\n\u003cp\u003eHere is the Index.java file:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e package com.example.whs;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\n\nimport android.app.Activity;\nimport android.content.Intent;\nimport android.os.Bundle;\nimport android.view.Menu;\nimport android.view.View;\nimport android.widget.AdapterView;\nimport android.widget.AdapterView.OnItemClickListener;\nimport android.widget.ListView;\n\npublic class Index extends Activity {\n\n public static final Object TITLE = \"title\";\n public static final Object SUBTITLE = \"subtitle\";\n public static final Object THUMBNAIL = \"thumbnail\";\n protected static final String POSITION = null;\n\n @Override\n protected void onCreate(Bundle savedInstanceState) {\n super.onCreate(savedInstanceState);\n setContentView(R.layout.activity_index);\n\n buildMenu();\n }\n\n @Override\n public boolean onCreateOptionsMenu(Menu menu) {\n // Inflate the menu; this adds items to the action bar if it is present.\n getMenuInflater().inflate(R.menu.index, menu);\n return true;\n }\n\n //Builds the menu for listview\n public void buildMenu(){\n ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt; menu = new ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt;();\n //Arrays for info\n String[] menuTitleArray = {\"Updates\", \"Gallerij\"}; \n String[] menuSubtitleArray = {\"Bekijk updates\", \"Bekijk foto's en geef reacties\", \"Bekijk de updates\"};\n String[] menuThumbnailArray = {\"updates\", \"gallery\"};\n for(int i=0; i \u0026lt; menuTitleArray.length; i++){\n // Build Hashmap for the item\n HashMap\u0026lt;String, String\u0026gt; item = new HashMap\u0026lt;String, String\u0026gt;();\n item.put((String) TITLE, menuTitleArray[i]);\n item.put((String) SUBTITLE, menuSubtitleArray[i]);\n item.put((String) THUMBNAIL, menuThumbnailArray[i]);\n menu.add(item);\n }\n\n\n // Add adapter to 
the list\n MenuAdapter adapter = new MenuAdapter(this, menu);\n ListView list = (ListView)findViewById(R.id.list);\n list.setAdapter(adapter);\n\n\n\n // Initialize the click event\n list.setOnItemClickListener(new OnItemClickListener(){\n @Override\n public void onItemClick(AdapterView\u0026lt;?\u0026gt; parent, View view, int position, long id){\n switch(position){\n case 0:\n Intent intent = new Intent(Index.this, Updates.class);\n startActivity(intent);\n }\n }\n });\n\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ehere is the MenuAdapter.java file:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epackage com.example.whs;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\n\nimport android.app.Activity;\nimport android.graphics.drawable.Drawable;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.BaseAdapter;\nimport android.widget.ImageView;\nimport android.widget.TextView;\n\npublic class MenuAdapter extends BaseAdapter{\n // Define variables\n ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt; data;\n Activity activity;\n private LayoutInflater inflater=null;\n\n public MenuAdapter(Activity a, ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt; d) {\n activity = a;\n data = d;\n inflater = LayoutInflater.from (a);\n }\n\n @Override\n public int getCount() {\n return data.size();\n }\n\n @Override\n public Object getItem(int position) {\n // TODO Auto-generated method stub\n return position;\n }\n\n @Override\n public long getItemId(int position) {\n // TODO Auto-generated method stub\n return position;\n }\n\n @Override\n public View getView(int position, View convertView, ViewGroup parent) {\n View vi=convertView;\n if(convertView==null)\n vi = inflater.inflate(R.layout.list_row, null); \n vi.setBackgroundResource(activity.getResources().getIdentifier(\"list_selector\", \"drawable\", Index.class.getPackage().getName()));\n // Focus on the 
parts that have to be changed\n TextView title = (TextView)vi.findViewById(R.id.title); // title\n TextView subtitle = (TextView)vi.findViewById(R.id.subtitle); // subtitle\n ImageView thumb_image=(ImageView)vi.findViewById(R.id.list_image); // thumb image\n\n // Get the info from the hashmap with the arraylist position\n HashMap\u0026lt;String, String\u0026gt; item = new HashMap\u0026lt;String, String\u0026gt;();\n item = data.get(position);\n String name = (String) Index.THUMBNAIL;\n // Look for the image\n int identifier = activity.getResources().getIdentifier(name, \"drawable\", Index.class.getPackage().getName());\n\n // Setting all values in listview\n title.setText(item.get(Index.TITLE));\n subtitle.setText(item.get(Index.SUBTITLE));\n thumb_image.setImageResource(identifier);\n return vi;\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way how to fix this?\u003c/p\u003e","accepted_answer_id":"15118889","answer_count":"1","comment_count":"5","creation_date":"2013-02-27 17:27:47.273 UTC","last_activity_date":"2013-02-27 17:53:08.607 UTC","last_edit_date":"2013-02-27 17:36:27.7 UTC","last_editor_display_name":"user2108957","owner_display_name":"user2108957","post_type_id":"1","score":"1","tags":"android|image|adapter","view_count":"209"} {"id":"7767622","title":"Debugging amf remote calls (from flex 4) in PHP Eclipse","body":"\u003cp\u003eI have installed and setup xdebug to debug php application. However I was wonder is it possible to debug the remote calls? I am using amfphp, I want to put break points and debug the code when the flex application calls the service. Is it possible? how to do it? Or Is there any way to simulate remote call called from flex 4 withing eclipse?\u003c/p\u003e\n\n\u003cp\u003eThanks in Advance\u003c/p\u003e\n\n\u003cp\u003e[edit]\nI have used xdebug pugin for firefox and chrome extension but both seems not working after I have installed them. 
Basically there is no hint/clue/document explaining how to use them, sadly. Can any one help?\u003c/p\u003e","accepted_answer_id":"7776365","answer_count":"1","comment_count":"0","creation_date":"2011-10-14 12:32:03.903 UTC","last_activity_date":"2011-10-15 07:05:28.043 UTC","last_edit_date":"2011-10-15 06:56:45.377 UTC","last_editor_display_name":"","last_editor_user_id":"310967","owner_display_name":"","owner_user_id":"310967","post_type_id":"1","score":"0","tags":"eclipse|flex4|xdebug|amfphp","view_count":"592"} {"id":"27654990","title":"how to get folder name in this","body":"\u003cp\u003ehello every ine in this i get file name and i crate hyperlink on it but foldername is missing to further action my code is \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;script type=\"text/javascript\"\u0026gt;\n$(document).ready(function(){\nvar files=\u0026lt;?php echo json_encode($files);?\u0026gt;;\nvar file_tree=build_file_tree(files);\nfile_tree.appendTo('#files');\n\nfunction build_file_tree(files){\n var tree=$('\u0026lt;ul\u0026gt;');\n for(x in files){\n\n if(typeof files[x]==\"object\"){\n var span=$('\u0026lt;span\u0026gt;').html(x).appendTo(\n $('\u0026lt;li\u0026gt;').appendTo(tree).addClass('folder')\n );\n\n var subtree=build_file_tree(files[x]).hide();\n span.after(subtree);\n span.click(function(){\n\n $(this).parent().find('ul:first').toggle();\n });\n\n }else{\n $('\u0026lt;li\u0026gt;').html('\u0026lt;a href=\"/admin/appearance/?theme='+tree+'\u0026amp;file='+files[x]+'\"\u0026gt;'+files[x]+'\u0026lt;/a\u0026gt;').appendTo(tree).addClass('file');\n }\n }\n\n return tree;\n\n}\n\n} );\n\n\u0026lt;/script\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei want folder name after theme=\u003c/p\u003e","answer_count":"1","comment_count":"4","creation_date":"2014-12-26 09:02:13.577 UTC","last_activity_date":"2014-12-26 10:06:16.65 
UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4226258","post_type_id":"1","score":"0","tags":"php|jquery","view_count":"64"} -{"id":"30842530","title":"MultiMatch query with Nest and Field Suffix","body":"\u003cp\u003eUsing Elasticsearch I have a field with a suffix - string field with a .english suffix with an english analyser on it as shown in the following mapping\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n\"valueString\": {\n \"type\": \"string\",\n \"fields\": {\n \"english\": {\n \"type\": \"string\",\n \"analyzer\": \"english\"\n }\n }\n}\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe following query snippet won't compile because \u003ccode\u003eValueString\u003c/code\u003e has no \u003ccode\u003eEnglish\u003c/code\u003e property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\nsh =\u0026gt; sh\n .Nested(n =\u0026gt; n\n .Path(p =\u0026gt; p.ScreenData)\n .Query(nq =\u0026gt; nq\n .MultiMatch(mm =\u0026gt; mm\n .Query(searchPhrase)\n .OnFields(\n f =\u0026gt; f.ScreenData.First().ValueString,\n f =\u0026gt; f.ScreenData.First().ValueString.english)\n .Type(TextQueryType.BestFields)\n )\n )\n )...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to strongly type the suffix at query time in NEST or do I have to use magic strings?\u003c/p\u003e","accepted_answer_id":"30843154","answer_count":"1","comment_count":"0","creation_date":"2015-06-15 10:11:22.09 UTC","last_activity_date":"2015-06-15 10:43:10.693 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"565804","post_type_id":"1","score":"1","tags":"c#|elasticsearch|nest","view_count":"1188"} +{"id":"30842530","title":"MultiMatch query with Nest and Field Suffix","body":"\u003cp\u003eUsing OpenSearchI have a field with a suffix - string field with a .english suffix with an english analyser on it as shown in the following mapping\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n\"valueString\": {\n \"type\": 
\"string\",\n \"fields\": {\n \"english\": {\n \"type\": \"string\",\n \"analyzer\": \"english\"\n }\n }\n}\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe following query snippet won't compile because \u003ccode\u003eValueString\u003c/code\u003e has no \u003ccode\u003eEnglish\u003c/code\u003e property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\nsh =\u0026gt; sh\n .Nested(n =\u0026gt; n\n .Path(p =\u0026gt; p.ScreenData)\n .Query(nq =\u0026gt; nq\n .MultiMatch(mm =\u0026gt; mm\n .Query(searchPhrase)\n .OnFields(\n f =\u0026gt; f.ScreenData.First().ValueString,\n f =\u0026gt; f.ScreenData.First().ValueString.english)\n .Type(TextQueryType.BestFields)\n )\n )\n )...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to strongly type the suffix at query time in NEST or do I have to use magic strings?\u003c/p\u003e","accepted_answer_id":"30843154","answer_count":"1","comment_count":"0","creation_date":"2015-06-15 10:11:22.09 UTC","last_activity_date":"2015-06-15 10:43:10.693 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"565804","post_type_id":"1","score":"1","tags":"c#|opensearch|nest","view_count":"1188"} {"id":"40569198","title":"How to store database into struct using swift3?","body":"\u003cp\u003eI have a function to get the database and return it in MutableArray, now I need the database to be in a struct.\u003c/p\u003e\n\n\u003cp\u003eDo I need to get the MutableArray into struct or should I get the data straight into the struct?\u003c/p\u003e\n\n\u003cp\u003eI have no idea how to approach this or how to store the database into struct \u003c/p\u003e\n\n\u003cp\u003eMy code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass CrimesInfo: NSObject {\n\nvar name: String = String()\nvar detail: String = String()\nvar time: String = String()\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe function:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunc getAllCrimesData() -\u0026gt; 
NSMutableArray {\n sharedInstance.database!.open()\n let resultSet: FMResultSet! = sharedInstance.database!.executeQuery(\"SELECT * FROM CrimeTable\", withArgumentsIn: nil)\n let marrCrimesInfo : NSMutableArray = NSMutableArray()\n if (resultSet != nil) {\n while resultSet.next() {\n let crimesInfo : CrimesInfo = CrimesInfo()\n crimesInfo.name = resultSet.string(forColumn: \"Name\")\n crimesInfo.detail = resultSet.string(forColumn: \"Detail\")\n crimesInfo.time = resultSet.string(forColumn: \"Time\")\n marrCrimesInfo.add(crimesInfo)\n }\n }\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"40569688","answer_count":"1","comment_count":"0","creation_date":"2016-11-13 00:20:57.203 UTC","last_activity_date":"2016-11-13 01:51:07.483 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"6705849","post_type_id":"1","score":"0","tags":"ios|swift3","view_count":"170"} {"id":"38263373","title":"To_Date, To_Char in oracle","body":"\u003cp\u003eMy Query for on oracle DB is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eSELECT NBR, START_TIME,END_TIME, BYTES_DATA\nFROM TABLE_NAME Partition (P201607)\nWHERE BYTES_DATA \u0026lt;\u0026gt; 0 AND NBR LIKE '%29320319%'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand results in:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eNBR START_TIME END_TIME BYTES_DATA \n1029320319 2016-07-01 00:15:51 2016-07-01 00:22:44 158014048\n1029320319 2016-07-01 00:22:51 2016-07-01 01:22:51 616324863 \n1029320319 2016-07-01 01:22:51 2016-07-01 01:55:15 431354240 \n1029320319 2016-07-01 01:55:22 2016-07-01 02:53:45 1040869155 \n1029320319 2016-07-01 02:53:52 2016-07-01 03:53:52 40615861 \n1029320319 2016-07-04 07:22:05 2016-07-04 07:22:05 4911\n1029320319 2016-07-05 06:42:56 2016-07-05 07:42:56 58271774\n1029320319 2016-07-05 07:42:56 2016-07-05 07:42:56 173\n1029320319 2016-07-08 07:47:01 2016-07-08 07:47:01 105995\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut I would like to filter these output based on 
Time. How can I get all records during this month(07) or last 7 days where the start_time and end_time is between 06:30:00 and 07:59:59? \u003c/p\u003e","accepted_answer_id":"38284821","answer_count":"1","comment_count":"0","creation_date":"2016-07-08 09:34:30.6 UTC","last_activity_date":"2016-07-09 18:10:40.19 UTC","last_edit_date":"2016-07-08 09:38:09.347 UTC","last_editor_display_name":"","last_editor_user_id":"164909","owner_display_name":"","owner_user_id":"6509716","post_type_id":"1","score":"0","tags":"oracle-sqldeveloper","view_count":"91"} {"id":"38844041","title":"Web scrape password protected website but there are errors","body":"\u003cp\u003eI am trying to scrape data from the member directory of a website (\"members.dublinchamber.ie\"). I have tried using the 'rvest' but I got the data from the login page even after entering the login details. The code is as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elibrary(rvest)\nurl \u0026lt;- \"members.dublinchamber.ie/login.aspx\"\npgsession \u0026lt;- html_session(url) \npgform \u0026lt;- html_form(pgsession)[[2]]\nfilled_form \u0026lt;- set_values(pgform,\n \"Username\" = \"username\",\n \"Password\" = \"password\")\nsubmit_form(pgsession, filled_form)\nmemberlist \u0026lt;- jump_to(pgsession,'members.dublinchamber.ie/directory/profile.aspx?compid=50333')\npage \u0026lt;- read_html(memberlist)\nusernames \u0026lt;- html_nodes(x = page, css = 'css of required data')\ndata_usernames \u0026lt;- data.frame(html_text(usernames, trim = TRUE),stringsAsFactors = FALSE)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI also used RCurl and again I'm getting data from the login page. 
The RCurl code is as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elibrary(RCurl)\ncurl = getCurlHandle()\ncurlSetOpt(cookiejar = 'cookies.txt', followlocation = TRUE, autoreferer = TRUE, curl = curl)\nhtml \u0026lt;- getURL('http://members.dublinchamber.ie/login.aspx', curl = curl)\nviewstate \u0026lt;- as.character(sub('.*id=\"__VIEWSTATE\" value=['142555296'].*', '\\\\1', html))\nparams \u0026lt;- list(\n 'ctl00$ContentPlaceHolder1$ExistingMembersLogin1$username'= 'username',\n 'ctl00$ContentPlaceHolder1$ExistingMembersLogin1$password'= 'pass',\n 'ctl00$ContentPlaceHolder1$ExistingMembersLogin1$btnSubmit'= 'login',\n '__VIEWSTATE' = viewstate\n)\nhtml = postForm('http://members.dublinchamber.ie/login.aspx', .params = params, curl = curl)\n grep('Logout', html)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThere are 3 URL's actually:\n1) members.dublinchamber.ie/directory/default.aspx(has the names of all industry and it is required to click on any industry)\n2) members.dublinchamber.ie/directory/default.aspx?industryVal=AdvMarPubrel (the advmarpubrel is just a small string which is generated as i clicked that industry)\n3) members.dublinchamber.ie/directory/profile.aspx?compid=19399 (this has the profile information of a specific company which i clicked in the previous page)\u003c/p\u003e\n\n\u003cp\u003ei want to scrape data which should give me industry name, list of companies in each industry and their details which are present as a table in the 3rd URL above.\nI am new here and also to R, webscrape. 
Please don't mind if the question was lengthy or not that clear.\u003c/p\u003e","answer_count":"0","comment_count":"6","creation_date":"2016-08-09 06:54:49.677 UTC","last_activity_date":"2016-08-09 06:54:49.677 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"6694101","post_type_id":"1","score":"0","tags":"r|web-scraping|password-protection","view_count":"212"} @@ -3204,7 +3204,7 @@ {"id":"33768447","title":"Incorrect number of bindings supplied python","body":"\u003cp\u003eI'm executing the following query in sqllite\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eidP = cur.execute('SELECT id from profs where name = ?',name)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI have a database table like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e| id | name |\n| 1 | xxxxxx |\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut I got an error : Incorrect number of bindings supplied. The current statement uses 1, and there are 6 supplied.\u003c/p\u003e\n\n\u003cp\u003eI think that the string 'xxxxxx' is seen as six individual characters.\u003c/p\u003e","accepted_answer_id":"33768486","answer_count":"2","comment_count":"0","creation_date":"2015-11-17 22:38:59.177 UTC","last_activity_date":"2015-11-17 23:23:33.48 UTC","last_edit_date":"2015-11-17 23:11:37.997 UTC","last_editor_display_name":"","last_editor_user_id":"2990008","owner_display_name":"","owner_user_id":"5574149","post_type_id":"1","score":"1","tags":"python|sqlite","view_count":"44"} {"id":"39574222","title":"xpath cant select only one html tag","body":"\u003cp\u003eI am trying to get some data from a website, but when i use the following code it's return all of the matched elements, i want to return only 1st match! 
I've tried extract_first but it returned none!\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e# -*- coding: utf-8 -*-\nimport scrapy\nfrom gumtree.items import GumtreeItem\n\n\n\nclass FlatSpider(scrapy.Spider):\n name = \"flat\"\n allowed_domains = [\"gumtree.com\"]\n start_urls = (\n 'https://www.gumtree.com/flats-for-sale',\n )\n\n def parse(self, response):\n item = GumtreeItem()\n item['title'] = response.xpath('//*[@class=\"listing-title\"][1]/text()').extract()\n return item\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow to select only one element with xpath selector ? \u003c/p\u003e","accepted_answer_id":"39574339","answer_count":"2","comment_count":"0","creation_date":"2016-09-19 13:18:20.28 UTC","last_activity_date":"2016-09-22 18:36:35.86 UTC","last_edit_date":"2016-09-19 13:24:58.857 UTC","last_editor_display_name":"","last_editor_user_id":"771848","owner_display_name":"","owner_user_id":"6570112","post_type_id":"1","score":"1","tags":"python|python-3.x|xpath|web-scraping|scrapy","view_count":"48"} {"id":"341477","title":"Generic Generics in Managed C++","body":"\u003cp\u003eI want to create a \u003cstrong\u003eList\u003c/strong\u003e of \u003cstrong\u003eKeyValuePair\u003c/strong\u003es in a managed C++ project. Here is the syntax I'm using\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eList\u0026lt;KeyValuePair\u0026lt;String^, String^\u0026gt;^\u0026gt;^ thing;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut I'm getting the following error:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eerror C3225: generic type argument for 'T' cannot be 'System::Collections::Generic::KeyValuePair ^', it must be a value type or a handle to a reference type\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eI basically want to do this (C#)\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eList\u0026lt;KeyValuePair\u0026lt;string, string\u0026gt;\u0026gt; thing;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut in managed C++. 
Oh and in .Net 2.0. Any takers?\u003c/p\u003e","accepted_answer_id":"341694","answer_count":"2","comment_count":"0","creation_date":"2008-12-04 17:40:58.63 UTC","last_activity_date":"2008-12-04 19:02:23.07 UTC","last_editor_display_name":"","owner_display_name":"brian","owner_user_id":"2831","post_type_id":"1","score":"2","tags":".net|generics|.net-2.0|managed-c++","view_count":"4607"} -{"id":"35100129","title":"How to get a nested document as object in mongoosastic","body":"\u003cp\u003ei have a nodejs server with mongoosastic an try to get a nested search result as objects instead of only the indexes. \u003c/p\u003e\n\n\u003cp\u003ethats my code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erequire('../server/serverInit');\n\n\nvar elasticsearch = require('elasticsearch');\nvar esclient = new elasticsearch.Client({\n host: 'localhost:9200',\n log: 'trace'\n});\n\n\nvar Schema = mongoose.Schema;\nvar mongoosastic = require('mongoosastic');\n\nvar elasticsearch = require('elasticsearch');\nvar esclient = new elasticsearch.Client({\n host: '127.0.0.1:9200',\n log: 'trace'\n});\nglobal.DBModel = {};\n/**\n * StoreSchema\n * @type type\n */\n\nvar storeSchema = global.mongoose.Schema({\n Name: {type: String, es_indexed: true},\n Email: {type: String, es_indexed: true},\n .....\n _articles: {type: [articleSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\n/**\n * ArtikelSchema\n * @type Schema\n */\n\nvar articleSchema = new Schema({ \n Name: {type: String, es_indexed: true},\n Kategorie: String,\n ....\n _stores: {type: [storeSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\nstoreSchema.plugin(mongoosastic, {\n esClient: esclient\n});\narticleSchema.plugin(mongoosastic, {\n esClient: esclient\n});\nglobal.DBModel.Artikel = global.mongoose.model('Artikel', articleSchema);\n\nglobal.DBModel.Store = global.mongoose.model('Store', 
storeSchema);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhen i now fire a search from the route \"/search\" which have this example code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eglobal.DBModel.Artikel.search({\n query_string: {\n query: \"*\"\n }\n }, {\n hydrate: true\n }, function (err, results) {\n if (err)\n return res.send(500, {error: err});\n res.send(results);\n }); \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei get this result:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n {\n \"_id\": \"56ab6b15352a43725a21bc92\",\n \"stores\": [\n \"56ab6b03352a43725a21bc91\"\n ],\n \"Name\": \"daaadd\",\n \"ArtikelNummer\": \"232\",\n \"__v\": 0,\n \"_stores\": []\n }\n ]\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow i can get directly a object instead of the id \"56ab6b03352a43725a21bc91\"? \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-01-30 09:33:13.4 UTC","last_activity_date":"2016-04-23 17:28:07.41 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4884035","post_type_id":"1","score":"0","tags":"node.js|mongodb|elasticsearch|mongoose|mongoosastic","view_count":"595"} +{"id":"35100129","title":"How to get a nested document as object in mongoosastic","body":"\u003cp\u003ei have a nodejs server with mongoosastic an try to get a nested search result as objects instead of only the indexes. 
\u003c/p\u003e\n\n\u003cp\u003ethats my code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erequire('../server/serverInit');\n\n\nvar opensearch = require('opensearch');\nvar esclient = new opensearch.Client({\n host: 'localhost:9200',\n log: 'trace'\n});\n\n\nvar Schema = mongoose.Schema;\nvar mongoosastic = require('mongoosastic');\n\nvar opensearch = require('opensearch');\nvar esclient = new opensearch.Client({\n host: '127.0.0.1:9200',\n log: 'trace'\n});\nglobal.DBModel = {};\n/**\n * StoreSchema\n * @type type\n */\n\nvar storeSchema = global.mongoose.Schema({\n Name: {type: String, es_indexed: true},\n Email: {type: String, es_indexed: true},\n .....\n _articles: {type: [articleSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\n/**\n * ArtikelSchema\n * @type Schema\n */\n\nvar articleSchema = new Schema({ \n Name: {type: String, es_indexed: true},\n Kategorie: String,\n ....\n _stores: {type: [storeSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\nstoreSchema.plugin(mongoosastic, {\n esClient: esclient\n});\narticleSchema.plugin(mongoosastic, {\n esClient: esclient\n});\nglobal.DBModel.Artikel = global.mongoose.model('Artikel', articleSchema);\n\nglobal.DBModel.Store = global.mongoose.model('Store', storeSchema);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhen i now fire a search from the route \"/search\" which have this example code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eglobal.DBModel.Artikel.search({\n query_string: {\n query: \"*\"\n }\n }, {\n hydrate: true\n }, function (err, results) {\n if (err)\n return res.send(500, {error: err});\n res.send(results);\n }); \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei get this result:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n {\n \"_id\": \"56ab6b15352a43725a21bc92\",\n \"stores\": [\n \"56ab6b03352a43725a21bc91\"\n ],\n \"Name\": \"daaadd\",\n \"ArtikelNummer\": \"232\",\n \"__v\": 0,\n 
\"_stores\": []\n }\n ]\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow i can get directly a object instead of the id \"56ab6b03352a43725a21bc91\"? \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-01-30 09:33:13.4 UTC","last_activity_date":"2016-04-23 17:28:07.41 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4884035","post_type_id":"1","score":"0","tags":"node.js|mongodb|opensearch|mongoose|mongoosastic","view_count":"595"} {"id":"6481429","title":"find index of element in a list using recursion","body":"\u003cpre\u003e\u003ccode\u003edef index(L,v)\n ''' Return index of value v in L '''\n pass\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI need help with implementing this function using recursion.\nReally new to recursion stuffs so any advices would help.!\u003c/p\u003e\n\n\u003cp\u003eNote that \u003ccode\u003eL\u003c/code\u003e is a list. \u003ccode\u003ev\u003c/code\u003e is a value.\u003c/p\u003e","answer_count":"7","comment_count":"3","creation_date":"2011-06-26 00:44:42.44 UTC","last_activity_date":"2011-06-26 01:43:03.01 UTC","last_edit_date":"2011-06-26 00:49:32.357 UTC","last_editor_display_name":"","last_editor_user_id":"396183","owner_display_name":"","owner_user_id":"815528","post_type_id":"1","score":"0","tags":"python","view_count":"5478"} {"id":"15201945","title":"xml querying with variable","body":"\u003cp\u003eI am trying to pass the xpath as parameter to the query. \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e declare @test as nvarchar(1000) = '(ns1:Book/Authors)[1]'\n ;with XMLNAMESPACES ('MyNameSpace:V1' as ns1)\n select \n b.XmlData.value(\n '@test'\n , 'nvarchar(100)') as QueriedData \n from Books b\n where b.BookID = '1'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe above statement gave the following error. 
\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eXQuery [Books.XmlData.value()]: Top-level attribute nodes are not supported\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTried it as @test, instead of '@test'. And got the following error:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eThe argument 1 of the XML data type method \"value\" must be a string literal.\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTried it using 'sql:variable(@test)' and get this error: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eXQuery [Books.XmlData.value()]: A string literal was expected\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTried it as 'sql:variable(\"@test\")' and it shows the value in @test as QueriedData, which is wrong \u003c/p\u003e\n\n\u003cp\u003ePlease tell me what am I missing here\u003c/p\u003e","accepted_answer_id":"15203967","answer_count":"1","comment_count":"1","creation_date":"2013-03-04 12:46:28.233 UTC","last_activity_date":"2013-03-04 14:47:07.283 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2081289","post_type_id":"1","score":"0","tags":"sql-server|xml","view_count":"1860"} {"id":"7279001","title":"Loading large data","body":"\u003cp\u003eI hava a datatable with large amount of data (250K).\u003cbr\u003e\nI have used DevExpress component and nhibernate.\u003cbr\u003e\nIn devexpress components is server mode, but it does not suit me because I am using nHibernate.\u003cbr\u003e\nIn the table is many column as well. 
And 5 relation tables which displays together with main table (250K records).\n What a best way to advise me to achieve the goal?\u003cbr\u003e\nThanks and sorry for my English.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEDIT:\u003c/strong\u003e\u003cbr\u003e\nHow to implement loading data with small portions?\u003c/p\u003e","accepted_answer_id":"7279537","answer_count":"2","comment_count":"1","creation_date":"2011-09-02 03:40:26.237 UTC","last_activity_date":"2011-09-02 05:59:19.53 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"348173","post_type_id":"1","score":"1","tags":"c#|nhibernate|devexpress","view_count":"477"} @@ -3227,7 +3227,7 @@ {"id":"28924319","title":"Code example POJO and DTO","body":"\u003cp\u003eI don't understand the difference between POJO and DTO in Java. I have read the article here: \u003ca href=\"https://stackoverflow.com/questions/1425302/what-is-the-difference-between-pojo-plain-old-java-object-and-dto-data-transf\"\u003eWhat is the Difference Between POJO (Plain Old Java Object) and DTO (Data Transfer Object)?\u003c/a\u003e . But I still don't understand the code implementation between them, what makes them different. Can you give the code example for each of them? Thank you so much before!\u003c/p\u003e","answer_count":"3","comment_count":"0","creation_date":"2015-03-08 08:00:07.067 UTC","favorite_count":"2","last_activity_date":"2015-03-08 08:12:31.05 UTC","last_edit_date":"2017-05-23 12:14:57.453 UTC","last_editor_display_name":"","last_editor_user_id":"-1","owner_display_name":"","owner_user_id":"4468740","post_type_id":"1","score":"1","tags":"java|dto|pojo","view_count":"612"} {"id":"45713706","title":"Include bootstrap features into a custom vuejs component","body":"\u003cp\u003eI'm asked to add a new html page to a \u003ca href=\"/questions/tagged/vuejs\" class=\"post-tag\" title=\"show questions tagged \u0026#39;vuejs\u0026#39;\" rel=\"tag\"\u003evuejs\u003c/a\u003e project. 
This page is already fully developed and is based on \u003ca href=\"/questions/tagged/jquery\" class=\"post-tag\" title=\"show questions tagged \u0026#39;jquery\u0026#39;\" rel=\"tag\"\u003ejquery\u003c/a\u003e and some \u003ca href=\"/questions/tagged/twitter-bootstrap\" class=\"post-tag\" title=\"show questions tagged \u0026#39;twitter-bootstrap\u0026#39;\" rel=\"tag\"\u003etwitter-bootstrap\u003c/a\u003e features.\u003c/p\u003e\n\n\u003cp\u003eI've created a new component.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003enewPage.vue\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;template\u0026gt;\n \u0026lt;div id=\"my-new-page\"\u0026gt;\n ...\n \u0026lt;/div\u0026gt;\n\u0026lt;/template\u0026gt;\n\n\u0026lt;style src=\"path/to/_bootstrap.scss\" lang=\"scss\" scoped\u0026gt;\u0026lt;/style\u0026gt;\n\u0026lt;style src=\"path/to/font-awesome.scss\" lang=\"scss\" scoped\u0026gt;\u0026lt;/style\u0026gt;\n\u0026lt;style src=\"path/to/animate.css\" scoped\u0026gt;\u0026lt;/style\u0026gt;\n\u0026lt;style src=\"path/to/custom/css.css\" scoped\u0026gt;\u0026lt;/style\u0026gt;\n\n\n\u0026lt;script src=\"path/to/custom/js.js\"\u0026gt;\u0026lt;/script\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003ejs.js\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e import jQuery from 'jquery';\n // Same error even with window.$ = window.jQuery = jQuery;\n import collapse from 'path/to/bootstrap/feature/collapse.js\";\n\n export default {\n created() {\n runjQuery(jQuery);\n },\n };\n\n function runjQuery($) {\n // here is how I thought integrate the jquery script of the new html page\n $(function () {\n ....\n $('#navbar').collapse('hide');\n });\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut this obviously does not work because \u003ccode\u003ecollapse.js\u003c/code\u003e cannot access \u003ccode\u003ejQuery\u003c/code\u003e and I get this 
error\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eUncaught ReferenceError: jQuery is not defined\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow do I fix this problem?\u003c/p\u003e\n\n\u003cp\u003eGiven that I don't want (if possible) to add bootstrap and jquery globally to my project because this will surely breaks here and there in my other components?\u003c/p\u003e","accepted_answer_id":"45715207","answer_count":"2","comment_count":"4","creation_date":"2017-08-16 12:35:40.567 UTC","last_activity_date":"2017-08-16 13:40:09.953 UTC","last_edit_date":"2017-08-16 12:44:00.5 UTC","last_editor_display_name":"","last_editor_user_id":"1507546","owner_display_name":"","owner_user_id":"1507546","post_type_id":"1","score":"0","tags":"javascript|jquery|twitter-bootstrap|vue.js","view_count":"80"} {"id":"14306200","title":"updating Rails fixtures during tests","body":"\u003cp\u003eI have a functional test in Rails (it's a Redmine plugin) which is causing me problems:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efixtures :settings\n\ntest 'error is shown on issues#show when issue custom field is not set up' do\n setting = settings(:release_notes)\n setting.value = setting.value.\n update('issue_required_field_id' =\u0026gt; 'garbage')\n #setting.save!\n\n get :show, :id =\u0026gt; '1'\n\n assert_response :success\n assert_select 'div.flash.error',\n :text =\u0026gt; I18n.t(:failed_find_issue_custom_field)\nend\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe Setting model has fields \u003ccode\u003ename\u003c/code\u003e and \u003ccode\u003evalue\u003c/code\u003e; in this particular setting, the value is a hash which is serialised. One of the keys in this hash is \u003ccode\u003eissue_required_field_id\u003c/code\u003e, which is used to find a particular \u003ccode\u003eIssueCustomField\u003c/code\u003e during the show action. 
If there is no custom field with this ID (which there shouldn't be, because I've set it to the string 'garbage') then it should render a div.flash.error explaining what's happened.\u003c/p\u003e\n\n\u003cp\u003eUnfortunately when \u003ccode\u003esetting.save!\u003c/code\u003e is commented out, the test fails because the Setting doesn't appear to have been updated -- the working value for that setting (as appears in settings.yml) is used, and the 'div.flash.error' doesn't appear. If I uncomment it, this test passes, but others fail because the change isn't rolled back at the end of the test.\u003c/p\u003e\n\n\u003cp\u003eIs there a way of modifying a fixture like this so that any changes are rolled back at the end of the test?\u003c/p\u003e\n\n\u003cp\u003eNote: \u003ccode\u003eself.use_transactional_fixtures\u003c/code\u003e is definitely set to true in \u003ccode\u003eActiveSupport::TestCase\u003c/code\u003e (and this test case is an \u003ccode\u003eActionController::TestCase\u003c/code\u003e, which is a subclass)\u003c/p\u003e","accepted_answer_id":"15813279","answer_count":"1","comment_count":"0","creation_date":"2013-01-13 17:34:27.51 UTC","last_activity_date":"2013-04-04 13:53:40.37 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1434220","post_type_id":"1","score":"0","tags":"ruby-on-rails|redmine|redmine-plugins","view_count":"168"} -{"id":"5771277","title":"Which Path should i take when making a 2D iPhone game?","body":"\u003cp\u003eI have been looking into Open GL ES, Quartz 2D, a framework called cocos2D and I am not sure what would be the best direction to move forward in when making a 2D game. I am planning on making a pretty simple not to intense game and I am new to game development but not to iphone development. \u003c/p\u003e\n\n\u003cp\u003eWhich would be the easiest to learn? 
Which one would give the best performance ?\u003c/p\u003e\n\n\u003cp\u003eThanks\u003c/p\u003e","accepted_answer_id":"5817524","answer_count":"2","comment_count":"0","creation_date":"2011-04-24 15:04:43.387 UTC","last_activity_date":"2011-04-28 11:00:34.01 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"579072","post_type_id":"1","score":"1","tags":"iphone","view_count":"99"} +{"id":"5771277","title":"Which Path should i take when making a 2D iPhone game?","body":"\u003cp\u003eI have been looking into Open GL ES, Quartz 2D, a framework called cocos2D and I am not sure what would be the best direction to move forward in when making a 2D game. I am planning on making a pretty simple not to intense game and I am new to game development but not to iphone development. \u003c/p\u003e\n\n\u003cp\u003eWhich would be the easiest to learn? Which one would give the best performance ?\u003c/p\u003e\n\n\u003cp\u003eThanks\u003c/p\u003e","accepted_answer_id":"5817524","answer_count":"2","comment_count":"0","creation_date":"2011-04-24 15:04:43.387 UTC","last_activity_date":"2011-04-28 11:00:34.01 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"579072","post_type_id":"1","score":"1","tags":"iphone","view_count":"99"} {"id":"24067042","title":"Adding a Hyperlink to each of WordPress' Simplicity-Lite featured-box element","body":"\u003cp\u003eI'm building a site for one of my very first clients using \u003ca href=\"http://wordpress.org/themes/simplicity-lite\" rel=\"nofollow\"\u003eWordpress' Simplicity-Lite Theme.\u003c/a\u003e\nI'd like to alter the theme somehow so as to hyperlink my images in the featured-boxes position (Right below the slideshowshow) to open up a page in the same window. \nThe problem is that the images are automatically generated/fetched by a PHP script that picks them up from the media gallery and so one script does it all for all the eight images. 
\nI want to make each of these images as fetched by PHP link to its own page to add interactivity to my site but I've tried several things including advice that I've received here before but all in vain both in the style.css and the featured-box.php files.\u003cbr\u003e\nBelow is a section of the PHP script in the featured-box.php file that fetches the 8 images and places them in the featured-boxes positions:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;div id=\"featured-boxs\"\u0026gt;\n\u0026lt;?php foreach (range(1,8) as $fboxn) { ?\u0026gt;\n\u0026lt;span class=\"featured-box\"\u0026gt; \n\u0026lt;img class=\"box-image\" src=\"\u0026lt;?php echo of_get_option('featured-image' . $fboxn, get_template_directory_uri() . '/images/featured-image' . $fboxn . '.png') ?\u0026gt;\"/\u0026gt;\n\u0026lt;h3\u0026gt;\u0026lt;?php echo of_get_option('featured-title' . $fboxn, 'Simplicity Theme for Small Business'); ?\u0026gt;\u0026lt;/h3\u0026gt;\n\u0026lt;div class=\"content-ver-sep\"\u0026gt;\u0026lt;/div\u0026gt;\u0026lt;br /\u0026gt;\n\u0026lt;p\u0026gt;\u0026lt;?php echo of_get_option('featured-description' . $fboxn , 'The Color changing options of Simplicity will give the WordPress Driven Site an attractive look. 
Simplicity is super elegant and Professional Responsive Theme which will create the business widely expressed.'); ?\u0026gt;\u0026lt;/p\u0026gt;\n\u0026lt;/span\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHere is the code in the style.css file that renders the images:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e#featured-boxs{padding:0 0 10px;display:block; margin: 0 -30px; text-align:center;}\n.featured-box{width:210px;margin:0 15px 10px; display:inline-block; text-align:left; vertical-align:top;}\n\n.featured-box h3{font-family:Verdana, Geneva, sans-serif;font-weight:100;font- size:15px;color:#555555;}\n#featured-boxs h2{font-family:Verdana, Geneva, sans-serif;font-weight:100;font- size:19px;color:#555555;}\n.featured-box-first{padding:20px 0;width:210px;margin:0;}\n#featured-boxs img.box-image{border:3px solid #EEEEEE;width:202px;height:100px;}\n#featured-boxs img.box-image:hover{box-shadow:0 0 11px 0px #555555;}\n#featured-boxs img.box-icon{width:50px;height:50px;}\nh3.featured-box2{width:140px;float:right;}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"5","creation_date":"2014-06-05 18:02:00.473 UTC","last_activity_date":"2014-06-05 18:02:00.473 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3701993","post_type_id":"1","score":"0","tags":"php|html|css|wordpress","view_count":"109"} {"id":"46013711","title":"Node.js server stops responding until control c is pressed, works for a while afterwards then repeats this, how can I stop this?","body":"\u003cp\u003eI created a node.js server that runs on AWS EC2 windows server 2016 and is run through cmd, it is used for register, sign in, (connected with a mysql database that is run on the same server) and streaming music.\u003c/p\u003e\n\n\u003cp\u003eThis has occurred when signing in and when trying to stream music, it has basically occurred in both get and post, so I don't think that is the cause. 
\u003c/p\u003e\n\n\u003cp\u003ethe code in question: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar express = require('express');\nvar jwt = require('jsonwebtoken');\nvar mysql = require('mysql');\nvar randtoken = require('rand-token');\nvar fs = require('fs');\n\nconst app = express();\n\nvar con = mysql.createConnection({\n host: \"localhost\",\n user: \"philip\",\n password: \"blockchain\",\n database: \"blockchainDB\"\n});\n\napp.post('/api/login', function(req, res){\n\n var sql = 'SELECT * FROM users WHERE username = ? AND password = ?';\n var valP = req.headers[\"username\"];\n var valU = req.headers[\"password\"];\n console.log('name: ' + valP, ' pass: ' + valU);\n\n con.query(sql, [valP, valU], function (err, result) {\n if (err) res.json({result: 'Something went wrong (error)'});\n if(result[0] == undefined) res.json({Login: 'Failed!'});\n else{\n console.log(result[0].name);\n const user = { id: result[0].name };\n const token = jwt.sign({ user }, 'blockchain');\n res.json({\n login: 'Success!',\n token: token,\n user: result[0].name\n });\n }\n });\n});\n\napp.post('/api/register', function(req, res){\n //auth user\n var hash = randtoken.generate(16);\n var sql = 'INSERT INTO users (user_id, name, surname, email, dateOfBirth, password,username,cellphone,isActivated,emailHash) VALUES ?';\n var val = [[\n req.headers[\"id\"],\n req.headers[\"fname\"],\n req.headers[\"lname\"],\n req.headers[\"email\"],\n req.headers[\"birthdate\"],\n req.headers[\"password\"],\n req.headers[\"username\"],\n req.headers[\"cellphone\"],\n 0,\n hash\n ]];\n\n con.query(sql, [val], function (err, result) {\n if (err) res.json({\n registered: 'failed',\n error: err\n });\n console.log('inserted val: ' + val);\n var link = hash;\n var emailAddress = req.headers['email'];\n var email = require('./app/email')(link,emailAddress);\n res.json({\n registered: 'Success!'\n });\n });\n});\n\napp.get('/api/protected', ensureToken, function(req, res){\n jwt.verify(req.token, 
'blockchain', function(err, data){\n if(err){\n res.sendStatus(403);\n }\n else {\n var sql = 'SELECT name, surname FROM users WHERE NOT name IS NULL;';\n con.query(sql, function (err, result, fields, rows){\n if (err) res.json({\n result: err\n });\n res.json({\n result: result\n });\n });\n }\n })\n});\n\napp.get('/music', function(req,res){\n\n var songName = req.query.song;\n var songAlbum = req.query.album;\n var songArtist = req.query.artist;\n var file = __dirname + '/music/' + songArtist + '/' + songAlbum + '/' + songName + '.mp3';\n fs.exists(file,function(exists){\n if(exists)\n {\n var rstream = fs.createReadStream(file);\n rstream.pipe(res);\n }\n else\n {\n res.send(\"Its a 404\");\n res.end();\n }\n\n });\n});\n\n\nfunction ensureToken(req, res, next){\n const bearerHeader = req.headers[\"authentication\"];\n if(typeof bearerHeader !== 'undefined'){\n const bearer = bearerHeader.split(\" \");\n const bearerToken = bearer[1];\n req.token = bearerToken;\n next();\n }\n else {\n res.sendStatus(403);\n }\n}\n\napp.listen(8080, function(){\n console.log('App is listening on port 8080!');\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIt works perfectly fine when, as stated above, and it seems to happen at totally random times, I'm still fairly new to node.js, so any help would be much appreciated. \u003c/p\u003e","answer_count":"0","comment_count":"7","creation_date":"2017-09-02 12:20:32.607 UTC","last_activity_date":"2017-09-02 12:20:32.607 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2890149","post_type_id":"1","score":"0","tags":"javascript|mysql|node.js|amazon-web-services","view_count":"25"} {"id":"15627364","title":"Does tinyMCE link list need to be generated by an external file","body":"\u003cp\u003eI'm trying to populate a link list in tinyMCE, but the values need to change depending on what the user has inputted in some other form fields. 
An array is already getting populated on the page that contains these value so wondered if it's possible to use this instead of populating with the external file like described here:\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"http://www.tinymce.com/wiki.php/Configuration:external_link_list_url\" rel=\"nofollow\"\u003ehttp://www.tinymce.com/wiki.php/Configuration:external_link_list_url\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eEDIT\u003c/p\u003e\n\n\u003cp\u003eSo is there an alternative to 'external_link_list_url'. Say for example 'external_link_list_var'\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003etinyMCE.init({\n ...\n external_link_list_url : \"myexternallist.js\"\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei'd have\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003etinyMCE.init({\n ...\n external_link_list_var : SomeVar\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIf not i guess one way to do it would be to pass the values via a query string to a php file.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2013-03-26 00:36:39.017 UTC","last_activity_date":"2013-03-26 10:19:08.417 UTC","last_edit_date":"2013-03-26 10:19:08.417 UTC","last_editor_display_name":"","last_editor_user_id":"940861","owner_display_name":"","owner_user_id":"940861","post_type_id":"1","score":"2","tags":"javascript|tinymce","view_count":"644"} @@ -3314,7 +3314,7 @@ {"id":"24324705","title":"How to select from unknown number of databases?","body":"\u003cp\u003eI want to show a customer a history of their total orders across multiple 'vendors'. Each vendor has a separate database in SQL server to store their own orders.\u003c/p\u003e\n\n\u003cp\u003eIn my database I only know which vendors the user is signed up with. 
So my sequence needs to go like this:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003eGet all the VendorIDs that the user is signed up with.\u003c/li\u003e\n\u003cli\u003eGo to the Vendor table and get their server + database name\u003c/li\u003e\n\u003cli\u003ePerform a select statement that gets all orders from each Order table in each of the Vendor databases that the user is signed up to.\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cblockquote\u003e\n\u003cpre\u003e\u003ccode\u003eDECLARE @UserID int = 999\n\nSELECT Count(OrderNumber) AS 'Orders'\n\nFROM\n--- Need some kind of loop here?\n[VendorServer].[VendorDB].[OrderTable] o1\n\nWHERE \no1.UserID = @UserID\n\u003c/code\u003e\u003c/pre\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eHow would I get the aggregate of the total number of orders this customer made when their orders are spread across multiple databases?\u003c/p\u003e\n\n\u003cp\u003eThe User may be signed up to over 100 vendors. So it has to query across 100 databases. This is an extreme example but its possible.\u003c/p\u003e","accepted_answer_id":"24325030","answer_count":"2","comment_count":"4","creation_date":"2014-06-20 10:01:56.77 UTC","last_activity_date":"2014-06-27 08:23:50.553 UTC","last_edit_date":"2014-06-20 10:23:08.823 UTC","last_editor_display_name":"","last_editor_user_id":"1774037","owner_display_name":"","owner_user_id":"1774037","post_type_id":"1","score":"0","tags":"sql|sql-server","view_count":"58"} {"id":"35110167","title":"LibGdx texture drawn as inverse of what #getTextureData gives","body":"\u003cp\u003eI've been trying to resolve this issue I have been having with displaying a texture correctly on my libgdx desktop program.\u003c/p\u003e\n\n\u003cp\u003eI have an Orthographic camera with which when I set as:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ecamera.setOrtho(false);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI get this image:\n\u003ca href=\"https://i.stack.imgur.com/s9G5D.png\" rel=\"nofollow 
noreferrer\"\u003e\u003cimg src=\"https://i.stack.imgur.com/s9G5D.png\" alt=\"enter image description here\"\u003e\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eAnd when I set it as:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ecamera.setOrtho(true);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI get this image:\n\u003ca href=\"https://i.stack.imgur.com/VeMq6.png\" rel=\"nofollow noreferrer\"\u003e\u003cimg src=\"https://i.stack.imgur.com/VeMq6.png\" alt=\"enter image description here\"\u003e\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eThe red image is drawn with a SpriteBatch:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ebatch.draw(texture, x, y, width, height);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhile the white image is drawn from individual points plotted based on if their alpha value was 1.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTextureData td = texture.getTextureData();\ntd.prepare;\nPixmap map = td.consumePixmap();\nfor (int i = 0; i \u0026lt; map.getWidth(); i++)\n for (int j = 0; j \u0026lt; map.getHeight(); j++)\n if (new Color(map.getPixel(i, j)).a == 1)\n points.add(new Point(i, j));\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAbove is the code used to get all the non-transparent pixels.\nThey are displayed as the white image, and seem to be the inverse of the original texture.\u003c/p\u003e\n\n\u003cp\u003eHowever, the points plotted and the image itself is always an inverse of one another.\u003c/p\u003e\n\n\u003cp\u003eIs there any way of resolving this issue?\u003c/p\u003e\n\n\u003cp\u003eThank you.\u003c/p\u003e\n\n\u003cp\u003ePS: I've tried multiple ways of trying to fix this:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eUsing Sprites and flipping them\u003c/li\u003e\n\u003cli\u003eUsing TextureRegions and flipping them\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eAbsolutely nothing seems to work.\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2016-01-31 03:58:16.527 
UTC","favorite_count":"0","last_activity_date":"2016-01-31 20:08:02.9 UTC","last_edit_date":"2016-01-31 20:08:02.9 UTC","last_editor_display_name":"","last_editor_user_id":"3326720","owner_display_name":"","owner_user_id":"3326720","post_type_id":"1","score":"0","tags":"java|opengl|libgdx|textures","view_count":"94"} {"id":"14353202","title":"Prompt width and height of image being uploaded","body":"\u003cp\u003eI am looking for some method to prompt the width and height of the image being uploaded using an asp file upload control. Following is the code I am using to prompt message for file size. I just want to add the width and height of the image being uploaded using that upload control. Please take a note that I can't use javascript DOM here like \u003ccode\u003eimg.clientWidth\u003c/code\u003e as there is no such html object to call it. \u003c/p\u003e\n\n\u003cp\u003eHere's the JS Code \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$(document).ready(function () {\n $(\"#\u0026lt;%= fupNewImage.ClientID %\u0026gt;\").bind(\"change\", function () {\n var fSize = ((this.files[0].size) / 1024).toFixed(2);\n if (fSize \u0026lt;= 200) {\n $(\"#fupSize\").html('Good file size: ' + fSize + \" kb Width=? and Height=?\" ); // ? denotes the actual value to be put.\n }\n //---------More cases\n });\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2013-01-16 07:23:04.767 UTC","last_activity_date":"2013-01-16 07:42:26.127 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1584140","post_type_id":"1","score":"0","tags":"javascript|jquery|asp.net|file-upload","view_count":"492"} -{"id":"43048414","title":"My android app loads successfully but shuts downs immedaitely","body":"\u003cp\u003eI'm using the Android NDK to port my C++ game over to mobile. I've debugged all the code successfully using ndk-build. And I've all successfully run ant-debug. 
I then installed the app onto my android device and the compiler said it was a success. However when I press the icon on my android screen, the app loads a black surface onto the screen and then shuts down after about 1 second. Could anybody suggest what might be the problem here? Has anybody experienced anything similar? I would like to know which area to start looking for the bug.\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2017-03-27 13:56:15.477 UTC","last_activity_date":"2017-03-27 14:24:29.887 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"7757722","post_type_id":"1","score":"-2","tags":"android|c++|ant|opengl-es|android-ndk","view_count":"45"} +{"id":"43048414","title":"My android app loads successfully but shuts downs immedaitely","body":"\u003cp\u003eI'm using the Android NDK to port my C++ game over to mobile. I've debugged all the code successfully using ndk-build. And I've all successfully run ant-debug. I then installed the app onto my android device and the compiler said it was a success. However when I press the icon on my android screen, the app loads a black surface onto the screen and then shuts down after about 1 second. Could anybody suggest what might be the problem here? Has anybody experienced anything similar? 
I would like to know which area to start looking for the bug.\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2017-03-27 13:56:15.477 UTC","last_activity_date":"2017-03-27 14:24:29.887 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"7757722","post_type_id":"1","score":"-2","tags":"android|c++|ant|opengl-es|android-ndk","view_count":"45"} {"id":"11622966","title":"PHP taking the value from only one key in an array","body":"\u003cp\u003eHey guys so I have an array like\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eArray ( \n [more] =\u0026gt; 1 \n [routingTable] =\u0026gt; Array ( \n [0] =\u0026gt; Array ( \n [ip] =\u0026gt;fcca:948b:3c04:f481:e678:8539:a57e:197a \n [link] =\u0026gt; 90731030 \n [path] =\u0026gt; 0000.0000.271a.c907 \n [isDct] =\u0026gt; 1 \n ) \n [1] =\u0026gt; Array (\n [ip] =\u0026gt; fc1c:fc12:2735:0c17:c864:5273:c66e:558f \n [link] =\u0026gt; 74624930 \n [path] =\u0026gt; 0000.0000.006e.c907 \n [isDct] =\u0026gt; 1 \n ) \n [2] =\u0026gt; Array ( \n [ip] =\u0026gt; fcf3:2015:05f7:e2d8:39e8:51ca:1cd5:b29b \n [link] =\u0026gt; 188709805 \n [path] =\u0026gt; 0000.0000.2ab6.c387 \n [isDct] =\u0026gt; 1 \n ) \n [3] =\u0026gt; Array ( [ip] =\u0026gt; fcf6:28f2:3522:8ad0:57ad:cc26:0a6e:27a3 [link] =\u0026gt; 7331630 [path] =\u0026gt; 0000.001c.4fca.4387 [isDct] =\u0026gt; 1 ) [4] =\u0026gt; Array ( [ip] =\u0026gt; fc99:02f4:7795:c86c:36bd:63ae:cf49:d459 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.0006.4387 [isDct] =\u0026gt; 1 ) [5] =\u0026gt; Array ( [ip] =\u0026gt; fcf3:ca3a:d5a9:3552:7e71:afa7:e87c:f1ce [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.0006.c387 [isDct] =\u0026gt; 1 ) [6] =\u0026gt; Array ( [ip] =\u0026gt; fc93:e5b5:7cde:7983:f50c:fe31:106b:1f88 [link] =\u0026gt; 87509810 [path] =\u0026gt; 0000.0000.004e.c387 [isDct] =\u0026gt; 1 ) [7] =\u0026gt; Array ( [ip] =\u0026gt; fcd4:1dc1:cc08:c97d:85e2:6cad:eab8:0864 [link] =\u0026gt; 188709805 [path] =\u0026gt; 
0000.0000.0056.c387 [isDct] =\u0026gt; 1 ) [8] =\u0026gt; Array ( [ip] =\u0026gt; fce8:78b3:fa72:84a6:f737:e85f:7525:46a3 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.0076.c387 [isDct] =\u0026gt; 1 ) [9] =\u0026gt; Array ( [ip] =\u0026gt; fc19:e2db:7977:6d2c:15ce:4363:19cc:6bd6 [link] =\u0026gt; 127238194 [path] =\u0026gt; 0000.0000.01e1.0387 [isDct] =\u0026gt; 1 ) [10] =\u0026gt; Array ( [ip] =\u0026gt; fc1b:7538:824d:ccf2:f5da:96eb:a04a:f6e4 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.036a.4387 [isDct] =\u0026gt; 1 ) [11] =\u0026gt; Array ( [ip] =\u0026gt; fc1f:8b91:f3e8:73b9:c46f:ed52:d09f:1c81 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.029a.4387 [isDct] =\u0026gt; 1 ) [12] =\u0026gt; Array ( [ip] =\u0026gt; fcf6:28f2:3522:8ad0:57ad:cc26:0a6e:27a3 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.0000.4607 [isDct] =\u0026gt; 1 ) [13] =\u0026gt; Array ( [ip] =\u0026gt; fcf6:28f2:3522:8ad0:57ad:cc26:0a6e:27a3 [link] =\u0026gt; 279709270 [path] =\u0026gt; 0000.0000.0000.43c3 [isDct] =\u0026gt; 1 ) [14] =\u0026gt; Array ( [ip] =\u0026gt; fcc7:3fc5:d3f2:f66e:ec97:25e3:4a3d:948c [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.0000.4387 [isDct] =\u0026gt; 1 ) [15] =\u0026gt; Array ( [ip] =\u0026gt; fcca:948b:3c04:f481:e678:8539:a57e:197a [link] =\u0026gt; 278098660 [path] =\u0026gt; 0000.0138.cfe9.c383 [isDct] =\u0026gt; 1 ) [16] =\u0026gt; Array ( [ip] =\u0026gt; fcf6:28f2:3522:8ad0:57ad:cc26:0a6e:27a3 [link] =\u0026gt; 9158 [path] =\u0026gt; 0000.0000.02f8.c087 [isDct] =\u0026gt; 1 ) [17] =\u0026gt; Array ( [ip] =\u0026gt; fc3a:956e:4b69:1c1e:5ebc:11a5:3e71:3e7e [link] =\u0026gt; 0 [path] =\u0026gt; 0000.0000.0348.c087 [isDct] =\u0026gt; 1 ) [18] =\u0026gt; Array ( [ip] =\u0026gt; fc93:e5b5:7cde:7983:f50c:fe31:106b:1f88 [link] =\u0026gt; 107374000 [path] =\u0026gt; 0000.0000.02ad.8087 [isDct] =\u0026gt; 1 ) [19] =\u0026gt; Array ( [ip] =\u0026gt; fc76:582b:9762:fcb6:1459:9564:f934:e02d [link] =\u0026gt; 177703971 
[path] =\u0026gt; 0000.0095.8ea9.c4c7 [isDct] =\u0026gt; 1 ) [20] =\u0026gt; Array ( [ip] =\u0026gt; fc41:bcf4:4c13:5cc6:d96d:aadc:74c2:df2a [link] =\u0026gt; 177703970 [path] =\u0026gt; 0000.008b.8ea9.c4c7 [isDct] =\u0026gt; 1 ) [21] =\u0026gt; Array ( [ip] =\u0026gt; fce9:df87:2170:6a3d:e0d4:67a5:c82d:1bc0 [link] =\u0026gt; 177703970 [path] =\u0026gt; 0000.008d.0ea9.c4c7 [isDct] =\u0026gt; 1 ) [22] =\u0026gt; Array ( [ip] =\u0026gt; fc56:926d:c133:bc89:e6eb:640c:aa4e:0cb2 [link] =\u0026gt; 774372 [path] =\u0026gt; 0000.0092.0ea9.c4c7 [isDct] =\u0026gt; 1 ) [23] =\u0026gt; Array ( [ip] =\u0026gt; fcd9:c8a0:c35c:ba2e:e3de:b497:8706:2aab [link] =\u0026gt; 99320952 [path] =\u0026gt; 0000.008e.0ea9.c4c7 [isDct] =\u0026gt; 1 ) [24] =\u0026gt; Array ( [ip] =\u0026gt; fcac:541e:9c5c:9ddc:f648:962a:2892:e33e [link] =\u0026gt; 0 [path] =\u0026gt; 0000.0086.8ea9.c4c7 [isDct] =\u0026gt; 1 ) [25] =\u0026gt; Array ( [ip] =\u0026gt; fcd4:1dc1:cc08:c97d:85e2:6cad:eab8:0864 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0083.8ea9.c4c7 [isDct] =\u0026gt; 1 ) [26] =\u0026gt; Array ( [ip] =\u0026gt; fc36:4345:785d:cbe6:fc6d:5d61:507a:d721 [link] =\u0026gt; 25769760 [path] =\u0026gt; 0000.008f.0ea9.c4c7 [isDct] =\u0026gt; 1 ) [27] =\u0026gt; Array ( [ip] =\u0026gt; fca5:372b:57be:78aa:e490:6b0f:da2a:c882 [link] =\u0026gt; 43486470 [path] =\u0026gt; 0000.0094.0ea9.c4c7 [isDct] =\u0026gt; 1 ) [28] =\u0026gt; Array ( [ip] =\u0026gt; fc73:81dc:9b2e:3095:cd37:0214:114e:d27f [link] =\u0026gt; 0 [path] =\u0026gt; 0000.049c.46a4.8047 [isDct] =\u0026gt; 1 ) [29] =\u0026gt; Array ( [ip] =\u0026gt; fc30:5b55:a53d:c456:3c3d:da26:4759:3cbc [link] =\u0026gt; 132321905 [path] =\u0026gt; 0000.0091.0ea9.c4c7 [isDct] =\u0026gt; 1 ) [30] =\u0026gt; Array ( [ip] =\u0026gt; fccf:3418:31dd:4126:c23a:1d73:4a2c:15e5 [link] =\u0026gt; 774372 [path] =\u0026gt; 0000.0097.0ea9.c4c7 [isDct] =\u0026gt; 1 ) [31] =\u0026gt; Array ( [ip] =\u0026gt; fc05:2d70:2146:9298:73b9:14fb:d7a9:633a [link] =\u0026gt; 
257697600 [path] =\u0026gt; 0000.0000.0007.4483 [isDct] =\u0026gt; 1 ) [32] =\u0026gt; Array ( [ip] =\u0026gt; fcf3:2015:05f7:e2d8:39e8:51ca:1cd5:b29b [link] =\u0026gt; 257697600 [path] =\u0026gt; 0000.0000.026b.4483 [isDct] =\u0026gt; 1 ) [33] =\u0026gt; Array ( [ip] =\u0026gt; fcd6:b2a5:e3cc:d78d:fc69:a90f:4bf7:4a02 [link] =\u0026gt; 264676910 [path] =\u0026gt; 0000.0000.0006.c483 [isDct] =\u0026gt; 1 ) [34] =\u0026gt; Array ( [ip] =\u0026gt; fccc:3ddb:f184:ee46:ae1a:f2a4:4f79:1ee7 [link] =\u0026gt; 34930104 [path] =\u0026gt; 0000.0000.02fa.c483 [isDct] =\u0026gt; 1 ) [35] =\u0026gt; Array ( [ip] =\u0026gt; fc98:82c4:0fca:f53e:2132:426a:c879:083c [link] =\u0026gt; 159987260 [path] =\u0026gt; 0000.0000.039a.c483 [isDct] =\u0026gt; 1 ) [36] =\u0026gt; Array ( [ip] =\u0026gt; fcf6:28f2:3522:8ad0:57ad:cc26:0a6e:27a3 [link] =\u0026gt; 7046418 [path] =\u0026gt; 0000.0000.038a.c483 [isDct] =\u0026gt; 1 ) [37] =\u0026gt; Array ( [ip] =\u0026gt; fce8:78b3:fa72:84a6:f737:e85f:7525:46a3 [link] =\u0026gt; 221190440 [path] =\u0026gt; 0000.0000.02ca.c483 [isDct] =\u0026gt; 1 ) [38] =\u0026gt; Array ( [ip] =\u0026gt; fcc7:3fc5:d3f2:f66e:ec97:25e3:4a3d:948c [link] =\u0026gt; 190857286 [path] =\u0026gt; 0000.0000.02ea.c483 [isDct] =\u0026gt; 1 ) [39] =\u0026gt; Array ( [ip] =\u0026gt; fc5d:baa5:61fc:6ffd:9554:67f0:e290:7535 [link] =\u0026gt; 190857285 [path] =\u0026gt; 0000.0000.021a.c483 [isDct] =\u0026gt; 1 ) [40] =\u0026gt; Array ( [ip] =\u0026gt; fcd9:c8a0:c35c:ba2e:e3de:b497:8706:2aab [link] =\u0026gt; 190857285 [path] =\u0026gt; 0000.0000.03ca.c483 [isDct] =\u0026gt; 1 ) [41] =\u0026gt; Array ( [ip] =\u0026gt; fcd4:1dc1:cc08:c97d:85e2:6cad:eab8:0864 [link] =\u0026gt; 264676910 [path] =\u0026gt; 0000.0000.027a.c483 [isDct] =\u0026gt; 1 ) [42] =\u0026gt; Array ( [ip] =\u0026gt; fc38:4c2c:1a8f:3981:f2e7:c2b9:6870:6e84 [link] =\u0026gt; 4294960 [path] =\u0026gt; 0000.0002.aa0b.c007 [isDct] =\u0026gt; 1 ) [43] =\u0026gt; Array ( [ip] =\u0026gt; 
fcd9:6fcc:642c:c70d:5ff2:63c3:8ead:c9ad [link] =\u0026gt; 0 [path] =\u0026gt; 0000.0000.0c7b.c007 [isDct] =\u0026gt; 1 ) [44] =\u0026gt; Array ( [ip] =\u0026gt; fc5b:0934:7fce:3885:7fe7:ab23:8743:3e14 [link] =\u0026gt; 124285405 [path] =\u0026gt; 0000.001a.df21.8047 [isDct] =\u0026gt; 1 ) [45] =\u0026gt; Array ( [ip] =\u0026gt; fcf6:28f2:3522:8ad0:57ad:cc26:0a6e:27a3 [link] =\u0026gt; 147102381 [path] =\u0026gt; 0000.0000.30be.c907 [isDct] =\u0026gt; 1 ) [46] =\u0026gt; Array ( [ip] =\u0026gt; fcf3:2015:05f7:e2d8:39e8:51ca:1cd5:b29b [link] =\u0026gt; 32212200 [path] =\u0026gt; 0000.0154.a496.c387 [isDct] =\u0026gt; 1 ) [47] =\u0026gt; Array ( [ip] =\u0026gt; fc97:d627:b1b6:3021:7b55:1ea3:1b3b:75a5 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.02eb.c007 [isDct] =\u0026gt; 1 ) [48] =\u0026gt; Array ( [ip] =\u0026gt; fccc:3ddb:f184:ee46:ae1a:f2a4:4f79:1ee7 [link] =\u0026gt; 26843500 [path] =\u0026gt; 0000.0000.0329.8007 [isDct] =\u0026gt; 1 ) [49] =\u0026gt; Array ( [ip] =\u0026gt; fcca:948b:3c04:f481:e678:8539:a57e:197a [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.03e9.8007 [isDct] =\u0026gt; 1 ) [50] =\u0026gt; Array ( [ip] =\u0026gt; fce1:b388:04e8:c4ff:654a:3e62:2c68:77ee [link] =\u0026gt; 44560210 [path] =\u0026gt; 0000.0000.2ea9.8007 [isDct] =\u0026gt; 1 ) [51] =\u0026gt; Array ( [ip] =\u0026gt; fc3a:2804:615a:b34f:abfe:c7d5:65d6:f50c [link] =\u0026gt; 135291240 [path] =\u0026gt; 0000.0000.36a9.8007 [isDct] =\u0026gt; 1 ) [52] =\u0026gt; Array ( [ip] =\u0026gt; fcd5:c432:affb:7e77:a754:74e0:5e98:12d3 [link] =\u0026gt; 148176120 [path] =\u0026gt; 0000.0000.32a9.8007 [isDct] =\u0026gt; 1 ) [53] =\u0026gt; Array ( [ip] =\u0026gt; fc37:acb2:544b:ed86:8b8d:9945:add7:b119 [link] =\u0026gt; 142807420 [path] =\u0026gt; 0000.0000.26a9.8007 [isDct] =\u0026gt; 1 ) [54] =\u0026gt; Array ( [ip] =\u0026gt; fc38:4c2c:1a8f:3981:f2e7:c2b9:6870:6e84 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.2aa9.8007 [isDct] =\u0026gt; 1 ) [55] =\u0026gt; Array 
( [ip] =\u0026gt; fc93:e5b5:7cde:7983:f50c:fe31:106b:1f88 [link] =\u0026gt; 18186471 [path] =\u0026gt; 0000.0000.33e9.8007 [isDct] =\u0026gt; 1 ) [56] =\u0026gt; Array ( [ip] =\u0026gt; fc09:c762:d144:4e53:bb79:372c:28dd:2cd6 [link] =\u0026gt; 166966572 [path] =\u0026gt; 0000.0000.1549.8007 [isDct] =\u0026gt; 1 ) [57] =\u0026gt; Array ( [ip] =\u0026gt; fcbd:9678:6e2c:568e:ea0d:6cc0:69f1:9996 [link] =\u0026gt; 123480101 [path] =\u0026gt; 0000.0001.fde9.8007 [isDct] =\u0026gt; 1 ) [58] =\u0026gt; Array ( [ip] =\u0026gt; fcef:c7a9:792a:45b3:741f:59aa:9adf:4081 [link] =\u0026gt; 81335805 [path] =\u0026gt; 0000.0000.18c7.8043 [isDct] =\u0026gt; 1 ) [59] =\u0026gt; Array ( [ip] =\u0026gt; fcb4:baa6:ca46:5255:8514:53da:28dc:1337 [link] =\u0026gt; 44560210 [path] =\u0026gt; 0000.0000.2aa9.c787 [isDct] =\u0026gt; 1 ) [60] =\u0026gt; Array ( [ip] =\u0026gt; fcef:c7a9:792a:45b3:741f:59aa:9adf:4081 [link] =\u0026gt; 137438720 [path] =\u0026gt; 0000.0001.8ca9.c787 [isDct] =\u0026gt; 1 ) [61] =\u0026gt; Array ( [ip] =\u0026gt; fc74:b146:a580:2be9:6285:7af3:6a56:2b7b [link] =\u0026gt; 0 [path] =\u0026gt; 0000.000a.22c7.8043 [isDct] =\u0026gt; 1 ) [62] =\u0026gt; Array ( [ip] =\u0026gt; fc00:9846:1c48:9a10:9c1b:3bbc:2322:face [link] =\u0026gt; 52613260 [path] =\u0026gt; 0000.0001.94a9.c787 [isDct] =\u0026gt; 1 ) [63] =\u0026gt; Array ( [ip] =\u0026gt; fc13:6176:aaca:8c7f:9f55:924f:26b3:4b14 [link] =\u0026gt; 1 [path] =\u0026gt; 0000.0000.32b6.8043 [isDct] =\u0026gt; 1 ) [64] =\u0026gt; Array ( [ip] =\u0026gt; fc2c:700f:63fa:2eb1:c360:059d:9b3e:1703 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.0034.81c7 [isDct] =\u0026gt; 1 ) [65] =\u0026gt; Array ( [ip] =\u0026gt; fc85:f077:1447:a03e:bae3:30b2:1e87:63b3 [link] =\u0026gt; 2147480 [path] =\u0026gt; 0000.0037.2a0b.c007 [isDct] =\u0026gt; 1 ) [66] =\u0026gt; Array ( [ip] =\u0026gt; fc2c:700f:63fa:2eb1:c360:059d:9b3e:1703 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.348e.c087 [isDct] =\u0026gt; 1 ) [67] =\u0026gt; 
Array ( [ip] =\u0026gt; fcd4:1dc1:cc08:c97d:85e2:6cad:eab8:0864 [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.004e.c087 [isDct] =\u0026gt; 1 ) [68] =\u0026gt; Array ( [ip] =\u0026gt; fc7e:854a:b6fe:440b:965d:e487:58fb:5701 [link] =\u0026gt; 185220150 [path] =\u0026gt; 0000.0000.298e.c087 [isDct] =\u0026gt; 1 ) [69] =\u0026gt; Array ( [ip] =\u0026gt; fc9a:8c7e:a05f:62c0:4872:a860:0610:2e4b [link] =\u0026gt; 61203180 [path] =\u0026gt; 0000.0000.318e.c087 [isDct] =\u0026gt; 1 ) [70] =\u0026gt; Array ( [ip] =\u0026gt; fc99:02f4:7795:c86c:36bd:63ae:cf49:d459 [link] =\u0026gt; 294649 [path] =\u0026gt; 0000.0000.0056.c087 [isDct] =\u0026gt; 1 ) [71] =\u0026gt; Array ( [ip] =\u0026gt; fcf1:b5d5:d0b4:c390:9db2:3f5e:d2d2:bff2 [link] =\u0026gt; 294649 [path] =\u0026gt; 0000.0000.2196.c087 [isDct] =\u0026gt; 1 ) [72] =\u0026gt; Array ( [ip] =\u0026gt; fc58:7ce1:b011:920f:5ac7:98dd:e3c4:0c81 [link] =\u0026gt; 294650 [path] =\u0026gt; 0000.0000.3296.c087 [isDct] =\u0026gt; 1 ) [73] =\u0026gt; Array ( [ip] =\u0026gt; fcac:541e:9c5c:9ddc:f648:962a:2892:e33e [link] =\u0026gt; 0 [path] =\u0026gt; 0000.0000.2b96.c087 [isDct] =\u0026gt; 1 ) [74] =\u0026gt; Array ( [ip] =\u0026gt; fca6:83a5:de61:21da:9f57:b943:c039:ffde [link] =\u0026gt; 188709805 [path] =\u0026gt; 0000.0000.0005.ca47 [isDct] =\u0026gt; 1 ) [75] =\u0026gt; Array ( [ip] =\u0026gt; fc2c:700f:63fa:2eb1:c360:059d:9b3e:1703 [link] =\u0026gt; 0 [path] =\u0026gt; 0000.0000.03b8.c087 [isDct] =\u0026gt; 1 ) [76] =\u0026gt; Array ( [ip] =\u0026gt; fc99:02f4:7795:c86c:36bd:63ae:cf49:d459 [link] =\u0026gt; 124553840 [path] =\u0026gt; 0000.0001.7140.c707 [isDct] =\u0026gt; 1 ) [77] =\u0026gt; Array ( [ip] =\u0026gt; fc3a:956e:4b69:1c1e:5ebc:11a5:3e71:3e7e [link] =\u0026gt; 107374000 [path] =\u0026gt; 0000.00d2.7140.c707 [isDct] =\u0026gt; 1 ) [78] =\u0026gt; Array ( [ip] =\u0026gt; fc72:6c3b:8c74:68a7:d8c3:b4e0:6cbd:9588 [link] =\u0026gt; 100394690 [path] =\u0026gt; 0000.008a.7140.c707 [isDct] =\u0026gt; 1 ) [79] 
=\u0026gt; Array ( [ip] =\u0026gt; fcf1:b5d5:d0b4:c390:9db2:3f5e:d2d2:bff2 [link] =\u0026gt; 0 [path] =\u0026gt; 0000.0086.7140.c707 [isDct] =\u0026gt; 1 ) [80] =\u0026gt; Array ( [ip] =\u0026gt; fc58:7ce1:b011:920f:5ac7:98dd:e3c4:0c81 [link] =\u0026gt; 124553841 [path] =\u0026gt; 0000.00ca.7140.c707 [isDct] =\u0026gt; 1 ) [81] =\u0026gt; Array ( [ip] =\u0026gt; fcac:541e:9c5c:9ddc:f648:962a:2892:e33e [link] =\u0026gt; 0 [path] =\u0026gt; 0000.00ae.7140.c707 [isDct] =\u0026gt; 1 ) [82] =\u0026gt; Array ( [ip] =\u0026gt; fce1:3a9f:4546:e9e5:6646:88f9:1b18:8d18 [link] =\u0026gt; 83214850 [path] =\u0026gt; 0000.00aa.7140.c707 [isDct] =\u0026gt; 1 ) [83] =\u0026gt; Array ( [ip] =\u0026gt; fcfc:89e0:da25:3e98:23be:bc9a:4114:48d5 [link] =\u0026gt; 1 [path] =\u0026gt; 0000.00c6.7140.c707 [isDct] =\u0026gt; 1 ) [84] =\u0026gt; Array ( [ip] =\u0026gt; fc9a:62c1:75d2:b027:ca9d:9278:4a22:bc37 [link] =\u0026gt; 120795750 [path] =\u0026gt; 0000.00a2.7140.c707 [isDct] =\u0026gt; 1 ) [85] =\u0026gt; Array ( [ip] =\u0026gt; fc90:8f10:9ca3:12a1:ab12:c98d:0680:d915 [link] =\u0026gt; 122406360 [path] =\u0026gt; 0000.00b6.7140.c707 [isDct] =\u0026gt; 1 ) [86] =\u0026gt; Array ( [ip] =\u0026gt; fcc7:3fc5:d3f2:f66e:ec97:25e3:4a3d:948c [link] =\u0026gt; 0 [path] =\u0026gt; 0000.00ba.7140.c707 [isDct] =\u0026gt; 1 ) [87] =\u0026gt; Array ( [ip] =\u0026gt; fc8f:aa30:3bba:8b3e:12fd:44a5:0322:77ff [link] =\u0026gt; 79456760 [path] =\u0026gt; 0000.000b.2071.4a03 [isDct] =\u0026gt; 1 ) [88] =\u0026gt; Array ( [ip] =\u0026gt; fcf6:28f2:3522:8ad0:57ad:cc26:0a6e:27a3 [link] =\u0026gt; 113816440 [path] =\u0026gt; 0000.0001.c4fa.c907 [isDct] =\u0026gt; 1 ) [89] =\u0026gt; Array ( [ip] =\u0026gt; fc74:b146:a580:2be9:62 [isDct] =\u0026gt; 1 ) ) [isDct] =\u0026gt; 1 ) \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd I need to just grab out all the ip's (the values of the key \"ip\") and place them into a normal array like ips[]\u003c/p\u003e\n\n\u003cp\u003eIve tried a few things, but It's been 
so long since I've used php that it's not coming,\nAny help is great!\u003c/p\u003e","accepted_answer_id":"11622975","answer_count":"2","comment_count":"0","creation_date":"2012-07-24 01:42:28.83 UTC","last_activity_date":"2012-07-24 02:03:59.167 UTC","last_edit_date":"2012-07-24 01:55:08.247 UTC","last_editor_display_name":"","last_editor_user_id":"290221","owner_display_name":"","owner_user_id":"1284959","post_type_id":"1","score":"1","tags":"php|arrays|multidimensional-array","view_count":"258"} {"id":"18364754","title":"Creating a normal class with injections from Spring","body":"\u003cp\u003eWell, I have a normal class (LovHelper) that is responsible for doing some utils tasks. When i say \u003cstrong\u003enormal class\u003c/strong\u003e is because LovHelper.java don't have @Component, @Service or @Repository annotation. \u003c/p\u003e\n\n\u003cp\u003eInside of this \"normal class\" i wanna inject a bean from spring, but the bean is always null. Look my Class LovHelper.java bellow:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epackage br.com.odontonew.helper; \n\nimport java.util.HashMap; \nimport java.util.List; \nimport java.util.Map; \n\nimport org.springframework.beans.factory.annotation.Autowired; \nimport org.springframework.stereotype.Component; \n\nimport br.com.odontonew.bean.Lov; \nimport br.com.odontonew.dao.BasicDAO; \n\npublic class LovHelper { \n\n @Autowired \n private BasicDAO dao; \n\n private static LovHelper instance; \n\n\n private LovHelper(){ \n\n } \n\n public static LovHelper getInstance(){ \n if (instance == null) \n instance = new LovHelper(); \n\n return instance; \n } \n\n\n public Lov getLovByCodigo(Class lovClass, String codigo){ \n Map\u0026lt;String,Object\u0026gt; map = new HashMap\u0026lt;String,Object\u0026gt;(); \n map.put(\"codigo\", codigo); \n List\u0026lt;Lov\u0026gt; lovs = (List\u0026lt;Lov\u0026gt;)dao.findByQuery(\"SELECT c FROM \"+lovClass.getName()+\" c WHERE c.codigo = :codigo\", map); \n if (lovs.size() == 1) 
\n return lovs.get(0); \n else \n return null; \n } \n\n\n /*Getters and Setters*/ \n public BasicDAO getDao() { \n return dao; \n } \n\n public void setDao(BasicDAO dao) { \n this.dao = dao; \n } \n\n} \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo, in another class i just call: LovHelper.getInstance().getLovByCodigo(param1, param2). But i always get a NullPointerException because the bean \"dao\" within LovHelper is NULL. \u003c/p\u003e\n\n\u003cp\u003eAfter think a little i decided to change my LovHelper.java (using singleton pattern) to a Bean for Spring inject, then I put @Component annotation and remove all singleton pattern the was developed. \nAnd in another class i inject \"lovHelper\" and use like this: lovHelper.getLovByCodigo(param1, param2). This second solution works fine, but the first not.\u003c/p\u003e\n\n\u003cp\u003eFinally, my doubt is: Why the original code (as posted) don't works.\u003c/p\u003e","answer_count":"2","comment_count":"2","creation_date":"2013-08-21 18:10:13.343 UTC","favorite_count":"0","last_activity_date":"2016-09-13 21:22:25.803 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2410547","post_type_id":"1","score":"1","tags":"java|spring","view_count":"2401"} {"id":"29959953","title":"This is googles standard code for getting current location of user on map but on some PC it shows incorrect results. what could be the reason?","body":"\u003cp\u003eThe code given below is the google's standard code for getting the current geolocation. This code works fine on my android mobile device but it shows my current location to Pune (841 kilometres from my current location), when I accessed the web page from my PC. On one of my friends pc it shows correct location while incorrect location on another friend's PC. Can any one tell why this code shows correct current location on some PC while incorrect on some other? 
\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;!DOCTYPE html\u0026gt;\n \u0026lt;html\u0026gt;\n \u0026lt;head\u0026gt;\n \u0026lt;title\u0026gt;Geolocation\u0026lt;/title\u0026gt;\n \u0026lt;meta name=\"viewport\" content=\"initial-scale=1.0, user-scalable=no\"\u0026gt;\n \u0026lt;meta charset=\"utf-8\"\u0026gt;\n \u0026lt;style\u0026gt;\n html, body, #map-canvas {\n height: 100%;\n margin: 0px;\n padding: 0px\n }\n \u0026lt;/style\u0026gt;\n \u0026lt;script src=\"https://maps.googleapis.com/maps/api/js?v=3.exp\u0026amp;signed_in=true\"\u0026gt;\u0026lt;/script\u0026gt;\n\n \u0026lt;script\u0026gt;\n // Note: This example requires that you consent to location sharing when\n // prompted by your browser. If you see a blank space instead of the map, this\n // is probably because you have denied permission for location sharing.\n\n var map;\n\n function initialize() {\n var mapOptions = {\n zoom: 6\n };\n map = new google.maps.Map(document.getElementById('map-canvas'),\n mapOptions);\n\n // Try HTML5 geolocation\n if(navigator.geolocation) {\n navigator.geolocation.getCurrentPosition(function(position) {\n var pos = new google.maps.LatLng(position.coords.latitude,\n position.coords.longitude);\n\n var infowindow = new google.maps.InfoWindow({\n map: map,\n position: pos,\n content: 'Location found using HTML5.'\n });\n\n map.setCenter(pos);\n }, function() {\n handleNoGeolocation(true);\n });\n } else {\n // Browser doesn't support Geolocation\n handleNoGeolocation(false);\n }\n }\n\n function handleNoGeolocation(errorFlag) {\n if (errorFlag) {\n var content = 'Error: The Geolocation service failed.';\n } else {\n var content = 'Error: Your browser doesn\\'t support geolocation.';\n }\n\n var options = {\n map: map,\n position: new google.maps.LatLng(60, 105),\n content: content\n };\n\n var infowindow = new google.maps.InfoWindow(options);\n map.setCenter(options.position);\n }\n\n google.maps.event.addDomListener(window, 'load', initialize);\n\n 
\u0026lt;/script\u0026gt;\n \u0026lt;/head\u0026gt;\n \u0026lt;body\u0026gt;\n \u0026lt;div id=\"map-canvas\"\u0026gt;\u0026lt;/div\u0026gt;\n \u0026lt;/body\u0026gt;\n \u0026lt;/html\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"29960123","answer_count":"2","comment_count":"2","creation_date":"2015-04-30 05:48:58.253 UTC","last_activity_date":"2015-04-30 06:40:04.21 UTC","last_edit_date":"2015-04-30 06:40:04.21 UTC","last_editor_display_name":"","last_editor_user_id":"4831567","owner_display_name":"","owner_user_id":"4847173","post_type_id":"1","score":"-1","tags":"javascript|html5|google-maps|geolocation","view_count":"96"} @@ -3481,7 +3481,7 @@ {"id":"8439438","title":"TTURLNavigation: Is it efficient?","body":"\u003cp\u003eSo I'm having a hard time wrapping my head around TTURLNavigation from the three20 framework. \u003c/p\u003e\n\n\u003cp\u003eAre the views being pushed onto a stack? I don't understand how one could just jump around an application without pushing and poping. I feel like if i just keep jumping to urls I am constantly pushing views onto my stack. \u003c/p\u003e\n\n\u003cp\u003eHow exactly does TTURLNavigation accomplish this. \u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2011-12-09 00:04:04.507 UTC","last_activity_date":"2011-12-09 00:04:04.507 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"373722","post_type_id":"1","score":"1","tags":"iphone|ios|three20","view_count":"34"} {"id":"32017321","title":"My mail server using IRedMail (postfix, dovecot, etc.) is only sending and receiving local emails?","body":"\u003cp\u003eI recently set up a mail server using IRedMail on a home server running Debian 8, using OpenLDAP, nginx in the installer. We got everything set up and configured to where we can access our mail server with roundcube (which I can access through mail.sterango.com) and Thunderbird and login to accounts just fine. 
We can send emails to and from accounts that are on the domain (seb@sterango.com can send and receive from postmaster@smail.sterango.com), but I am not able to send or recieve email with either of these accounts from outside sources such as my gmail account.\nHere is my main.cf\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e# See /usr/share/postfix/main.cf.dist for a commented, more complete version\n\n\n# Debian specific: Specifying a file name will cause the first\n# line of that file to be used as the name. The Debian default\n# is /etc/mailname.\n#myorigin = /etc/mailname\n\nsmtpd_banner = $myhostname ESMTP $mail_name (Debian/GNU)\nbiff = no\n\n# appending .domain is the MUA's job.\nappend_dot_mydomain = no\n\n# Uncomment the next line to generate \"delayed mail\" warnings\n#delay_warning_time = 4h\n\nreadme_directory = no\n\n# TLS parameters\nsmtpd_tls_cert_file = /etc/ssl/certs/iRedMail.crt\nsmtpd_tls_key_file = /etc/ssl/private/iRedMail.key\nsmtpd_use_tls=yes\nsmtpd_tls_session_cache_database = btree:${data_directory}/smtpd_scache\nsmtp_tls_session_cache_database = btree:${data_directory}/smtp_scache\n\n# See /usr/share/doc/postfix/TLS_README.gz in the postfix-doc package for\n# information on enabling SSL in the smtp client.\n\nsmtpd_relay_restrictions = permit_mynetworks permit_sasl_authenticated defer_unauth_destination\nmyhostname = mail.sterango.com\nalias_maps = hash:/etc/postfix/aliases\nalias_database = hash:/etc/postfix/aliases\nmyorigin = mail.sterango.com\nmydestination = \nrelayhost = \nmynetworks = 127.0.0.1\nmailbox_command = /usr/lib/dovecot/deliver\nmailbox_size_limit = 0\nrecipient_delimiter = +\ninet_interfaces = loopback-only\ntransport_maps = proxy:mysql:/etc/postfix/mysql/transport_maps_user.cf, proxy:mysql:/etc/postfix/mysql/transport_maps_domain.cf\ninet_protocols = ipv4\nvirtual_alias_domains = \nmydomain = mail.sterango.com\nallow_percent_hack = no\nswap_bangpath = no\nmynetworks_style = host\nsmtpd_data_restrictions = 
reject_unauth_pipelining\nsmtpd_reject_unlisted_recipient = yes\nsmtpd_reject_unlisted_sender = yes\nsmtpd_tls_protocols = !SSLv2 !SSLv3\nsmtp_tls_protocols = !SSLv2 !SSLv3\nlmtp_tls_protocols = !SSLv2 !SSLv3\nsmtpd_tls_mandatory_protocols = !SSLv2 !SSLv3\nsmtp_tls_mandatory_protocols = !SSLv2 !SSLv3\nlmtp_tls_mandatory_protocols = !SSLv2 !SSLv3\nsmtpd_tls_mandatory_exclude_ciphers = aNULL, eNULL, EXPORT, DES, RC4, MD5, PSK, aECDH, EDH-DSS-DES-CBC3-SHA, EDH-RSA-DES-CDC3-SHA, KRB5-DE5, CBC3-SHA\nsmtpd_tls_dh1024_param_file = /etc/ssl/dhparams.pem\nsmtp_tls_security_level = may\nsmtp_tls_CAfile = $smtpd_tls_CAfile\nsmtp_tls_loglevel = 0\nsmtp_tls_note_starttls_offer = yes\nsmtpd_sender_restrictions = reject_unknown_sender_domain, reject_non_fqdn_sender, reject_unlisted_sender, permit_mynetworks, reject_sender_login_mismatch, permit_sasl_authenticated\ndelay_warning_time = 0h\nmaximal_queue_lifetime = 4h\nbounce_queue_lifetime = 4h\nproxy_read_maps = $canonical_maps $lmtp_generic_maps $local_recipient_maps $mydestination $mynetworks $recipient_bcc_maps $recipient_canonical_maps $relay_domains $relay_recipient_maps $relocated_maps $sender_bcc_maps $sender_canonical_maps $smtp_generic_maps $smtpd_sender_login_maps $transport_maps $virtual_alias_domains $virtual_alias_maps $virtual_mailbox_domains $virtual_mailbox_maps $smtpd_sender_restrictions\nsmtp_data_init_timeout = 240s\nsmtp_data_xfer_timeout = 600s\nsmtpd_helo_required = yes\nsmtpd_helo_restrictions = permit_mynetworks, permit_sasl_authenticated, reject_non_fqdn_helo_hostname, reject_invalid_helo_hostname, check_helo_access pcre:/etc/postfix/helo_access.pcre\nqueue_run_delay = 300s\nminimal_backoff_time = 300s\nmaximal_backoff_time = 4000s\nenable_original_recipient = no\ndisable_vrfy_command = yes\nhome_mailbox = Maildir/\nallow_min_user = no\nmessage_size_limit = 15728640\nvirtual_minimum_uid = 2000\nvirtual_uid_maps = static:2000\nvirtual_gid_maps = static:2000\nvirtual_mailbox_base = 
/var/vmail\nvirtual_mailbox_domains = proxy:mysql:/etc/postfix/mysql/virtual_mailbox_domains.cf\nvirtual_mailbox_maps = proxy:mysql:/etc/postfix/mysql/virtual_mailbox_maps.cf\nvirtual_alias_maps = proxy:mysql:/etc/postfix/mysql/virtual_alias_maps.cf, proxy:mysql:/etc/postfix/mysql/domain_alias_maps.cf, proxy:mysql:/etc/postfix/mysql/catchall_maps.cf, proxy:mysql:/etc/postfix/mysql/domain_alias_catchall_maps.cf\nsender_bcc_maps = proxy:mysql:/etc/postfix/mysql/sender_bcc_maps_user.cf, proxy:mysql:/etc/postfix/mysql/sender_bcc_maps_domain.cf\nrecipient_bcc_maps = proxy:mysql:/etc/postfix/mysql/recipient_bcc_maps_user.cf, proxy:mysql:/etc/postfix/mysql/recipient_bcc_maps_domain.cf\nrelay_domains = $mydestination, proxy:mysql:/etc/postfix/mysql/relay_domains.cf\nsmtpd_sender_login_maps = proxy:mysql:/etc/postfix/mysql/sender_login_maps.cf\nsmtpd_sasl_auth_enable = yes\nsmtpd_sasl_local_domain = \nbroken_sasl_auth_clients = yes\nsmtpd_sasl_security_options = noanonymous\nsmtpd_tls_auth_only = yes\nsmtpd_recipient_restrictions = reject_unknown_recipient_domain, reject_non_fqdn_recipient, reject_unlisted_recipient, check_policy_service inet:127.0.0.1:7777, permit_mynetworks, permit_sasl_authenticated, reject_unauth_destination\nsmtpd_tls_security_level = may\nsmtpd_tls_loglevel = 0\nsmtpd_tls_CAfile = /etc/ssl/certs/iRedMail.crt\ntls_random_source = dev:/dev/urandom\nvirtual_transport = dovecot\ndovecot_destination_recipient_limit = 1\nsmtpd_sasl_type = dovecot\nsmtpd_sasl_path = private/dovecot-auth\ncontent_filter = smtp-amavis:[127.0.0.1]:10024\nsmtp-amavis_destination_recipient_limit = 1\ndefault_transport = smtp\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAlso here are my DNS records: \u003ca href=\"http://i.imgur.com/gR2LAIZ.png\" rel=\"nofollow\"\u003ehttp://i.imgur.com/gR2LAIZ.png\u003c/a\u003e\nShould the A one point to a local IP like that? \nI will also gladly post any logs or files. 
Thanks!\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2015-08-14 19:20:15.813 UTC","last_activity_date":"2016-10-06 14:39:10.17 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5113523","post_type_id":"1","score":"-1","tags":"email|nginx|dns|debian|postfix-mta","view_count":"1287"} {"id":"20824901","title":"GridView: getCheckedItemPositions() return no values or wrong values","body":"\u003cp\u003eI implemented a gallery with a Grid View that includes check boxes but i have problems with getCheckedItemPositions() method. \u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003e(1)\u003c/strong\u003e If i launch the Activity with GridView, select some items and ask which items have been selected, the method getCheckedItemPositions() doesn't return any value. \u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003e(2)\u003c/strong\u003e If i launch another Activity, i return to Activity with the GridView and ask what items have been selected, the method getCheckedItemPositions() returns wrong values .\u003c/p\u003e\n\n\u003cp\u003eI think first of all that the implementation of getView() is wrong, for (1) probably the GridView doesn't know which items are selected. I have no ideas for the (2) instead. 
\u003c/p\u003e\n\n\u003cp\u003eHere the code:\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eGalleryFragment getCheckedItemOnGridView()\u003c/strong\u003e: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprivate void getCheckedItemOnGridView() {\n if(D) Log.d(TAG, \"getCheckedItemOnGridView(): called\");\n SparseBooleanArray checkedItemPositions = mGalleryGridView.getCheckedItemPositions();\n for (int i=0 ; i\u0026lt;checkedItemPositions.size() ; i++) {\n if(D) Log.d(TAG, \"checkedItemPositions : \" + checkedItemPositions.valueAt(i) + \" index \" + checkedItemPositions.keyAt(i));\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eAdapter extends BaseAdapter getView():\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprivate SparseBooleanArray checked;\n\npublic View getView(final int position, View convertView, ViewGroup parent) {\n if(D) Log.d(TAG, \"Called: getView\");\n ViewHolder holder;\n\n if (convertView == null) {\n convertView = mLayoutInflater.inflate(R.layout.listitem_gallery, null);\n\n holder = new ViewHolder();\n holder.thumbnailView = (ImageView) convertView.findViewById(R.id.imageview_thumbnail);\n holder.checkBoxView = (CheckBox) convertView.findViewById(R.id.checkbox);\n\n holder.checkBoxView.setOnClickListener(new View.OnClickListener() {\n @Override\n public void onClick(View view) {\n if (((CheckBox) view).isChecked()) {\n checked.put((Integer) view.getTag(), true);\n\n } else {\n checked.put((Integer) view.getTag(), false);\n }\n }\n });\n\n convertView.setTag(holder);\n } else {\n holder = (ViewHolder) convertView.getTag();\n }\n\n BitmapFileImageLoader bitmapLoader = new BitmapFileImageLoader();\n bitmapLoader.loadBitmap(getItem(position), holder.thumbnailView);\n\n holder.checkBoxView.setTag(position);\n holder.checkBoxView.setChecked(checked.get(position));\n\n return 
convertView;\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"20829571","answer_count":"2","comment_count":"1","creation_date":"2013-12-29 11:54:58.817 UTC","favorite_count":"1","last_activity_date":"2013-12-29 20:12:08.897 UTC","last_editor_display_name":"","owner_display_name":"user2523485","post_type_id":"1","score":"1","tags":"android|gridview|checkbox","view_count":"974"} -{"id":"3354626","title":"Metal shading (like silver) on iphone opengl?","body":"\u003cp\u003eDoes anyone know a tutorial hat explains how to shade an object to look like \nsilver metal? (on iphone)?\nMaybe starting with a spere like in this:\n\u003ca href=\"http://iphonedevelopment.blogspot.com/2009/05/opengl-es-from-ground-up-part-5-living.html\" rel=\"nofollow noreferrer\"\u003ehttp://iphonedevelopment.blogspot.com/2009/05/opengl-es-from-ground-up-part-5-living.html\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eOr can this not be accomplished without the new shaders in 2.0?\u003c/p\u003e\n\n\u003cp\u003eThanks\nSebastian \u003c/p\u003e","accepted_answer_id":"3403856","answer_count":"2","comment_count":"0","creation_date":"2010-07-28 15:41:21.453 UTC","last_activity_date":"2010-08-04 08:35:38.66 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"134213","post_type_id":"1","score":"0","tags":"iphone|opengl-es","view_count":"909"} +{"id":"3354626","title":"Metal shading (like silver) on iphone opengl?","body":"\u003cp\u003eDoes anyone know a tutorial hat explains how to shade an object to look like \nsilver metal? 
(on iphone)?\nMaybe starting with a spere like in this:\n\u003ca href=\"http://iphonedevelopment.blogspot.com/2009/05/opengl-es-from-ground-up-part-5-living.html\" rel=\"nofollow noreferrer\"\u003ehttp://iphonedevelopment.blogspot.com/2009/05/opengl-es-from-ground-up-part-5-living.html\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eOr can this not be accomplished without the new shaders in 2.0?\u003c/p\u003e\n\n\u003cp\u003eThanks\nSebastian \u003c/p\u003e","accepted_answer_id":"3403856","answer_count":"2","comment_count":"0","creation_date":"2010-07-28 15:41:21.453 UTC","last_activity_date":"2010-08-04 08:35:38.66 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"134213","post_type_id":"1","score":"0","tags":"iphone|opengl-es","view_count":"909"} {"id":"1185117","title":"Should I use thread local storage for variables that only exist in a {class,method}?","body":"\u003cp\u003eI am implementing a relatively simple thread pool with Python's \u003ccode\u003eQueue.Queue\u003c/code\u003e class. I have one producer class that contains the \u003ccode\u003eQueue\u003c/code\u003e instance along with some convenience methods, along with a consumer class that subclasses \u003ccode\u003ethreading.Thread\u003c/code\u003e. I instantiate that object for every thread I want in my pool (\"worker threads,\" I think they're called) based on an integer.\u003c/p\u003e\n\n\u003cp\u003eEach worker thread takes \u003ccode\u003eflag, data\u003c/code\u003e off the queue, processes it using its own database connection, and places the GUID of the row onto a list so that the producer class knows when a job is done.\u003c/p\u003e\n\n\u003cp\u003eWhile I'm aware that other modules implement the functionality I'm coding, the reason I'm coding this is to gain a better understanding of how Python threading works. 
This brings me to my question.\u003c/p\u003e\n\n\u003cp\u003eIf I store anything in a function's namespace or in the class's \u003ccode\u003e__dict__\u003c/code\u003e object, will it be thread safe?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass Consumer(threading.Thread):\n def __init__(self, producer, db_filename):\n self.producer = producer\n self.conn = sqlite3.connect(db_filename) # Is this var thread safe?\n def run(self):\n flag, data = self.producer.queue.get()\n\n while flag != 'stop':\n # Do stuff with data; Is `data` thread safe?\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI am thinking that both would be thread safe, here's my rationale:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eEach time a class is instantiated, a new \u003ccode\u003e__dict__\u003c/code\u003e gets created. Under the scenario I outline above, I don't think any other object would have a reference to this object. (Now, perhaps the situation might get more complicated if I used \u003ccode\u003ejoin()\u003c/code\u003e functionality, but I'm not...)\u003c/li\u003e\n\u003cli\u003eEach time a function gets called, it creates its own name space which exists for the lifetime of the function. 
I'm not making any of my variables \u003ccode\u003eglobal\u003c/code\u003e, so I don't understand how any other object would have a reference to a function variable.\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003e\u003ca href=\"https://stackoverflow.com/questions/104983/please-explain-thread-local-storage-for-python\"\u003eThis post\u003c/a\u003e addresses my question somewhat, but is still a little abstract for me.\u003c/p\u003e\n\n\u003cp\u003eThanks in advance for clearing this up for me.\u003c/p\u003e","answer_count":"3","comment_count":"0","creation_date":"2009-07-26 17:53:08.107 UTC","favorite_count":"1","last_activity_date":"2013-02-19 23:27:37.597 UTC","last_edit_date":"2017-05-23 12:14:20.03 UTC","last_editor_display_name":"","last_editor_user_id":"-1","owner_display_name":"","owner_user_id":"145350","post_type_id":"1","score":"4","tags":"python|concurrency|namespaces|multithreading","view_count":"1449"} {"id":"29648424","title":"remove session in javascript and jquery","body":"\u003cp\u003ewe all know that we can set and get \u003ccode\u003esession\u003c/code\u003e using javascript like below:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003esession.setItem(\"name\", \"value\"); \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003esession.getItem(\"name\");\u003c/p\u003e\n\n\u003cp\u003eI need to know how to destroy particular and all session variable in javascript.\u003c/p\u003e\n\n\u003cp\u003eI used \u003ca href=\"https://www.google.co.in/?gfe_rd=cr\u0026amp;ei=kUAuVb6AGLHv8wfjy4CgCQ\u0026amp;gws_rd=ssl#q=destroy%20session%20variable%20in%20javascript\" rel=\"nofollow\"\u003egoogle\u003c/a\u003e but I'm unable to get exactly what I need. Please help me to find the solution. 
jQuery answers are also welcome (without plugin)\u003c/p\u003e\n\n\u003cp\u003eThank you in advance\u003c/p\u003e","accepted_answer_id":"29648548","answer_count":"2","comment_count":"1","creation_date":"2015-04-15 10:58:49.057 UTC","last_activity_date":"2015-04-15 11:29:34.083 UTC","last_edit_date":"2015-04-15 11:20:50.353 UTC","last_editor_display_name":"","last_editor_user_id":"3256749","owner_display_name":"","owner_user_id":"3256749","post_type_id":"1","score":"-2","tags":"javascript|jquery|html5|session","view_count":"18792"} {"id":"43224131","title":"Ignore all .axd file from MVC routing","body":"\u003cp\u003eIn my application, Both mvc and web form is present. \u003c/p\u003e\n\n\u003cp\u003eBut when I open any \u003ccode\u003e.aspx\u003c/code\u003e page then in \u003ccode\u003eApplication_Error\u003c/code\u003e error is thrown.\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eThe controller for path '/WebResource.axd' was not found or does not implement IController.\u003c/p\u003e\n \n \u003cp\u003eThe controller for path '/ScriptResource.axd' was not found or does\n not implement IController.\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eI tried all these method, but none of them is working.\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cp\u003eAdded at top in RegisterRoutes method.\u003c/p\u003e\n\n\u003cp\u003e\u003ccode\u003eroutes.IgnoreRoute(\"{resource}.axd/{*pathInfo}\");\n routes.IgnoreRoute(\"{*allaspx}\", new { allaspx = @\".*\\.aspx(/.*)?\" });\u003c/code\u003e\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eCreate a FileTypeConstraint class to check file ext.\u003c/p\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003e.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class FileTypeConstraint : IRouteConstraint\n {\n private readonly string[] MatchingFileTypes;\n\n public FileTypeConstraint(string matchingFileType)\n {\n MatchingFileTypes = new[] { matchingFileType };\n }\n\n public 
FileTypeConstraint(string[] matchingFileTypes)\n {\n MatchingFileTypes = matchingFileTypes;\n }\n\n public bool Match(HttpContextBase httpContext, Route route, string parameterName, RouteValueDictionary values, RouteDirection routeDirection)\n {\n if (values[\"url\"] != null)\n {\n string path = values[\"url\"].ToString();\n return MatchingFileTypes.Any(x =\u0026gt; path.ToLower().EndsWith(x, StringComparison.CurrentCultureIgnoreCase));\n }\n else\n {\n return false;\n }\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd added this line at top and bottom both \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e routes.MapRoute(\n \"Defaultaxd\",\n \"{*url}\",\n new { controller = \"Home\", action = \"Index\", id = UrlParameter.Optional },\n new { myCustomConstraint = new FileTypeConstraint(new[] { \"axd\" }) }\n );\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTried all these links.\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://stackoverflow.com/questions/1666211/httphandlers-with-asp-net-mvc\"\u003eHttpHandlers with ASP.NET MVC\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://stackoverflow.com/questions/1609726/asp-net-mvc-routing-issue\"\u003eASP.NET MVC routing issue?\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://stackoverflow.com/questions/2500750/using-url-routing-for-web-forms-and-stoproutinghandler-for-favicon\"\u003eUsing URL Routing for Web Forms and StopRoutingHandler for Favicon\u003c/a\u003e\u003c/p\u003e","answer_count":"0","comment_count":"2","creation_date":"2017-04-05 07:05:39.503 UTC","last_activity_date":"2017-04-05 07:05:39.503 UTC","last_edit_date":"2017-05-23 11:46:25.623 UTC","last_editor_display_name":"","last_editor_user_id":"-1","owner_display_name":"","owner_user_id":"2465787","post_type_id":"1","score":"0","tags":"c#|asp.net|asp.net-mvc|asp.net-mvc-routing","view_count":"110"} @@ -3542,7 +3542,7 @@ {"id":"25893938","title":"Cell References in Cube Formula - Excel 
PowerPivot","body":"\u003cp\u003eI'm looking to run a \u003ccode\u003eCUBEVALUE\u003c/code\u003e formula with member expressions based on the contents of multiple different cells. For this example i have 2 cells with a value for REGION in:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eI15: Border\nI16: Midlands\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI can reference one cell successfully using a cube value formula:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e=CUBEVALUE(\"PowerPivot Data\",\"[Measures].[Sum of WEIGHTED_IMPRESSIONS]\",\"[pvtBASE].[REGION].\u0026amp;[\"\u0026amp;I$15\u0026amp;\"]\")\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eCouldn't find a way within \u003ccode\u003eCUBEVALUE\u003c/code\u003e alone to replicate this result to reference both \u003ccode\u003eI15\u003c/code\u003e and \u003ccode\u003eI16\u003c/code\u003e so tried with a \u003ccode\u003eCUBESET\u003c/code\u003e then referencing the \u003ccode\u003eCUBESET\u003c/code\u003e in a later \u003ccode\u003eCUBEVALUE\u003c/code\u003e formula:\u003c/p\u003e\n\n\u003cp\u003eFor the \u003ccode\u003eCUBESET\u003c/code\u003e, this formula works:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e=CUBESET(\"PowerPivot Data\",{\"[pvtBASE].[REGION].\u0026amp;[Midlands]\",\"[pvtBASE].[REGION].\u0026amp;[Border]\"})\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis formula works:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e=CUBESET(\"PowerPivot Data\",\"[pvtBASE].[REGION].\u0026amp;[\"\u0026amp;I15\u0026amp;\"]\")\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut for some reason this doesn't:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e=CUBESET(\"PowerPivot Data\",{\"[pvtBASE].[REGION].\u0026amp;[\"\u0026amp;I15\u0026amp;\"]\",\"[pvtBASE].[REGION].\u0026amp;[\"\u0026amp;I16\u0026amp;\"]\"})\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eDoes anyone know how to fix the final \u003ccode\u003eCUBESET\u003c/code\u003e formula or if perhaps there is another way of 
fitting multiple members and cell references into a \u003ccode\u003eCUBEVALUE\u003c/code\u003e formula. \u003c/p\u003e\n\n\u003cp\u003eFeels like i'm close but then again I might not be!\u003c/p\u003e\n\n\u003cp\u003eCheers\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2014-09-17 14:55:42.357 UTC","last_activity_date":"2014-09-19 20:00:52.87 UTC","last_edit_date":"2014-09-17 15:27:52.067 UTC","last_editor_display_name":"","last_editor_user_id":"3975214","owner_display_name":"","owner_user_id":"4050943","post_type_id":"1","score":"2","tags":"cube|powerpivot","view_count":"2489"} {"id":"11515934","title":"AsyncTaskLoader onLoadFinished with a pending task and config change","body":"\u003cp\u003eI'm trying to use an \u003ccode\u003eAsyncTaskLoader\u003c/code\u003e to load data in the background to populate a detail view in response to a list item being chosen. I've gotten it mostly working but I'm still having one issue. If I choose a second item in the list and then rotate the device \u003cem\u003ebefore the load for the first selected item has completed\u003c/em\u003e, then the \u003ccode\u003eonLoadFinished()\u003c/code\u003e call is reporting to the activity being stopped rather than the new activity. This works fine when choosing just a single item and then rotating.\u003c/p\u003e\n\n\u003cp\u003eHere is the code I'm using. 
Activity:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic final class DemoActivity extends Activity\n implements NumberListFragment.RowTappedListener,\n LoaderManager.LoaderCallbacks\u0026lt;String\u0026gt; {\n\n private static final AtomicInteger activityCounter = new AtomicInteger(0);\n\n private int myActivityId;\n\n private ResultFragment resultFragment;\n\n private Integer selectedNumber;\n\n @Override\n public void onCreate(Bundle savedInstanceState) {\n super.onCreate(savedInstanceState);\n\n myActivityId = activityCounter.incrementAndGet();\n Log.d(\"DemoActivity\", \"onCreate for \" + myActivityId);\n\n setContentView(R.layout.demo);\n\n resultFragment = (ResultFragment) getFragmentManager().findFragmentById(R.id.result_fragment);\n\n getLoaderManager().initLoader(0, null, this);\n\n }\n\n @Override\n protected void onDestroy() {\n super.onDestroy();\n Log.d(\"DemoActivity\", \"onDestroy for \" + myActivityId);\n }\n\n @Override\n public void onRowTapped(Integer number) {\n selectedNumber = number;\n resultFragment.setResultText(\"Fetching details for item \" + number + \"...\");\n getLoaderManager().restartLoader(0, null, this);\n }\n\n @Override\n public Loader\u0026lt;String\u0026gt; onCreateLoader(int id, Bundle args) {\n return new ResultLoader(this, selectedNumber);\n }\n\n @Override\n public void onLoadFinished(Loader\u0026lt;String\u0026gt; loader, String data) {\n Log.d(\"DemoActivity\", \"onLoadFinished reporting to activity \" + myActivityId);\n resultFragment.setResultText(data);\n }\n\n @Override\n public void onLoaderReset(Loader\u0026lt;String\u0026gt; loader) {\n\n }\n\n static final class ResultLoader extends AsyncTaskLoader\u0026lt;String\u0026gt; {\n\n private static final Random random = new Random();\n\n private final Integer number;\n\n private String result;\n\n ResultLoader(Context context, Integer number) {\n super(context);\n this.number = number;\n }\n\n @Override\n public String loadInBackground() {\n // Simulate 
expensive Web call\n try {\n Thread.sleep(5000);\n } catch (InterruptedException e) {\n e.printStackTrace();\n }\n\n return \"Item \" + number + \" - Price: $\" + random.nextInt(500) + \".00, Number in stock: \" + random.nextInt(10000);\n }\n\n @Override\n public void deliverResult(String data) {\n if (isReset()) {\n // An async query came in while the loader is stopped\n return;\n }\n\n result = data;\n\n if (isStarted()) {\n super.deliverResult(data);\n }\n }\n\n @Override\n protected void onStartLoading() {\n if (result != null) {\n deliverResult(result);\n }\n\n // Only do a load if we have a source to load from\n if (number != null) {\n forceLoad();\n }\n }\n\n @Override\n protected void onStopLoading() {\n // Attempt to cancel the current load task if possible.\n cancelLoad();\n }\n\n @Override\n protected void onReset() {\n super.onReset();\n\n // Ensure the loader is stopped\n onStopLoading();\n\n result = null;\n }\n\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eList fragment:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic final class NumberListFragment extends ListFragment {\n\n interface RowTappedListener {\n\n void onRowTapped(Integer number);\n\n }\n\n private RowTappedListener rowTappedListener;\n\n @Override\n public void onAttach(Activity activity) {\n super.onAttach(activity);\n\n rowTappedListener = (RowTappedListener) activity;\n }\n\n @Override\n public void onActivityCreated(Bundle savedInstanceState) {\n super.onActivityCreated(savedInstanceState);\n\n ArrayAdapter\u0026lt;Integer\u0026gt; adapter = new ArrayAdapter\u0026lt;Integer\u0026gt;(getActivity(),\n R.layout.simple_list_item_1,\n Arrays.asList(1, 2, 3, 4, 5, 6));\n setListAdapter(adapter);\n\n }\n\n @Override\n public void onListItemClick(ListView l, View v, int position, long id) {\n ArrayAdapter\u0026lt;Integer\u0026gt; adapter = (ArrayAdapter\u0026lt;Integer\u0026gt;) getListAdapter();\n rowTappedListener.onRowTapped(adapter.getItem(position));\n 
}\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eResult fragment:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic final class ResultFragment extends Fragment {\n\n private TextView resultLabel;\n\n @Override\n public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {\n View root = inflater.inflate(R.layout.result_fragment, container, false);\n\n resultLabel = (TextView) root.findViewById(R.id.result_label);\n if (savedInstanceState != null) {\n resultLabel.setText(savedInstanceState.getString(\"labelText\", \"\"));\n }\n\n return root;\n }\n\n @Override\n public void onSaveInstanceState(Bundle outState) {\n super.onSaveInstanceState(outState);\n\n outState.putString(\"labelText\", resultLabel.getText().toString());\n }\n\n void setResultText(String resultText) {\n resultLabel.setText(resultText);\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI've been able to get this working using plain \u003ccode\u003eAsyncTask\u003c/code\u003es but I'm trying to learn more about \u003ccode\u003eLoader\u003c/code\u003es since they handle the configuration changes automatically.\u003c/p\u003e\n\n\u003chr\u003e\n\n\u003cp\u003e\u003cstrong\u003eEDIT\u003c/strong\u003e: I think I may have tracked down the issue by looking at the source for \u003ca href=\"http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/4.0.3_r1/android/app/LoaderManager.java?av=f#552\"\u003eLoaderManager\u003c/a\u003e. 
When \u003ccode\u003einitLoader\u003c/code\u003e is called after the configuration change, the \u003ccode\u003eLoaderInfo\u003c/code\u003e object has its \u003ccode\u003emCallbacks\u003c/code\u003e field updated with the new activity as the implementation of \u003ccode\u003eLoaderCallbacks\u003c/code\u003e, as I would expect.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic \u0026lt;D\u0026gt; Loader\u0026lt;D\u0026gt; initLoader(int id, Bundle args, LoaderManager.LoaderCallbacks\u0026lt;D\u0026gt; callback) {\n if (mCreatingLoader) {\n throw new IllegalStateException(\"Called while creating a loader\");\n }\n\n LoaderInfo info = mLoaders.get(id);\n\n if (DEBUG) Log.v(TAG, \"initLoader in \" + this + \": args=\" + args);\n\n if (info == null) {\n // Loader doesn't already exist; create.\n info = createAndInstallLoader(id, args, (LoaderManager.LoaderCallbacks\u0026lt;Object\u0026gt;)callback);\n if (DEBUG) Log.v(TAG, \" Created new loader \" + info);\n } else {\n if (DEBUG) Log.v(TAG, \" Re-using existing loader \" + info);\n info.mCallbacks = (LoaderManager.LoaderCallbacks\u0026lt;Object\u0026gt;)callback;\n }\n\n if (info.mHaveData \u0026amp;\u0026amp; mStarted) {\n // If the loader has already generated its data, report it now.\n info.callOnLoadFinished(info.mLoader, info.mData);\n }\n\n return (Loader\u0026lt;D\u0026gt;)info.mLoader;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever, when there is a pending loader, the main \u003ccode\u003eLoaderInfo\u003c/code\u003e object also has an \u003ccode\u003emPendingLoader\u003c/code\u003e field with a reference to a \u003ccode\u003eLoaderCallbacks\u003c/code\u003e as well, and this object is never updated with the new activity in the \u003ccode\u003emCallbacks\u003c/code\u003e field. 
I would expect to see the code look like this instead:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// This line was already there\ninfo.mCallbacks = (LoaderManager.LoaderCallbacks\u0026lt;Object\u0026gt;)callback;\n// This line is not currently there\ninfo.mPendingLoader.mCallbacks = (LoaderManager.LoaderCallbacks\u0026lt;Object\u0026gt;)callback;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIt appears to be because of this that the pending loader calls \u003ccode\u003eonLoadFinished\u003c/code\u003e on the old activity instance. If I breakpoint in this method and make the call that I feel is missing using the debugger, everything works as I expect.\u003c/p\u003e\n\n\u003cp\u003eThe new question is: Have I found a bug, or is this the expected behavior?\u003c/p\u003e","answer_count":"4","comment_count":"14","creation_date":"2012-07-17 04:15:59.85 UTC","favorite_count":"7","last_activity_date":"2014-04-23 21:28:50.293 UTC","last_edit_date":"2012-07-18 03:48:43.677 UTC","last_editor_display_name":"","last_editor_user_id":"278897","owner_display_name":"","owner_user_id":"278897","post_type_id":"1","score":"24","tags":"android|android-loadermanager|asynctaskloader|android-loader","view_count":"6034"} {"id":"13877297","title":"import org.jsoup.* not working","body":"\u003cp\u003eI try to make a java app using Jsoup.\u003c/p\u003e\n\n\u003cp\u003eInstead of using \u003c/p\u003e\n\n\u003cp\u003e(A)\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport org.jsoup.Jsoup;\nimport org.jsoup.helper.Validate;\nimport org.jsoup.helper.Validate;\nimport org.jsoup.nodes.Document;\nimport org.jsoup.nodes.Element;\nimport org.jsoup.select.Elements;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI want to use \u003c/p\u003e\n\n\u003cp\u003e(B)\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport org.jsoup.*;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e(A) is working but (B) is not...\u003c/p\u003e\n\n\u003cp\u003eI am using IntelliJ and imported the 
dependencies...\nWhy is this not working ?\u003c/p\u003e","accepted_answer_id":"13877410","answer_count":"1","comment_count":"1","creation_date":"2012-12-14 10:56:16.47 UTC","favorite_count":"1","last_activity_date":"2012-12-14 11:04:55.803 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1790983","post_type_id":"1","score":"0","tags":"java|import|jsoup","view_count":"6319"} -{"id":"36389990","title":"Reverse string (irvine)","body":"\u003cp\u003eI am supposed to take a string that is at least 50 characters long using letter numbers and symbols and reverse it in assembler for a class. He gave us this piece of code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003estring1 BYTE \"abcdefghijklmnopqurstuvwxyz\",10,13,0\nstring2 BYTE 50 DUP(?),10,13,0\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWe are also not allowed to us stacks or string functions. \u003c/p\u003e\n\n\u003cp\u003eI think I get the logic to use but I need help with the syntax for it. I think we are supposed to get the number of characters in the string and then using loops we reverse them.\u003c/p\u003e\n\n\u003cp\u003eHere is what I have so far but I am getting error 2022 in line 15 saying that operands must be the same size.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e TITLE masm Template (main.asm)\n; Description:reserve string code\n; \n; Revision date:4/3/16\n\nINCLUDE Irvine32.inc\n.data\nstring1 BYTE \"abcdefghijklmnopqrstuvwsyzABCDEFGHIJKLMNOPQRSTUVWSYZ123!@#\",10,13,0\nstring2 BYTE 58 DUP(?),10,13,0\ncount dw 13\n.code\n\nmain PROC\n mov ax, @data\n mov ds,ax\n mov es,ax\n mov cx,count ;cx=58\n mov sI,0\n mov di,0\n add di,count ;\n dec di\n\nagain: mov al,string1[si]\n mov string2[di],al;\n inc si\n dec di\n loop again\n call WriteString\n\n\n exit\nmain ENDP\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWe are using visual studio 2013 for compiling \u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://www.youtube.com/watch?v=aOheFNXcIRI\" 
rel=\"nofollow\"\u003ehere is the video I used to get this far\u003c/a\u003e\u003c/p\u003e","answer_count":"0","comment_count":"6","creation_date":"2016-04-03 19:18:47.73 UTC","last_activity_date":"2016-04-04 07:22:24.42 UTC","last_edit_date":"2016-04-04 07:22:24.42 UTC","last_editor_display_name":"","last_editor_user_id":"3857942","owner_display_name":"","owner_user_id":"6153010","post_type_id":"1","score":"0","tags":"assembly|x86|reverse|masm|irvine32","view_count":"242"} +{"id":"36389990","title":"Reverse string (irvine)","body":"\u003cp\u003eI am supposed to take a string that is at least 50 characters long using letter numbers and symbols and reverse it in assembler for a class. He gave us this piece of code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003estring1 BYTE \"abcdefghijklmnopqurstuvwxyz\",10,13,0\nstring2 BYTE 50 DUP(?),10,13,0\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWe are also not allowed to us stacks or string functions. \u003c/p\u003e\n\n\u003cp\u003eI think I get the logic to use but I need help with the syntax for it. 
I think we are supposed to get the number of characters in the string and then using loops we reverse them.\u003c/p\u003e\n\n\u003cp\u003eHere is what I have so far but I am getting error 2022 in line 15 saying that operands must be the same size.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e TITLE masm Template (main.asm)\n; Description:reserve string code\n; \n; Revision date:4/3/16\n\nINCLUDE Irvine32.inc\n.data\nstring1 BYTE \"abcdefghijklmnopqrstuvwsyzABCDEFGHIJKLMNOPQRSTUVWSYZ123!@#\",10,13,0\nstring2 BYTE 58 DUP(?),10,13,0\ncount dw 13\n.code\n\nmain PROC\n mov ax, @data\n mov ds,ax\n mov es,ax\n mov cx,count ;cx=58\n mov sI,0\n mov di,0\n add di,count ;\n dec di\n\nagain: mov al,string1[si]\n mov string2[di],al;\n inc si\n dec di\n loop again\n call WriteString\n\n\n exit\nmain ENDP\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWe are using visual studio 2013 for compiling \u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://www.youtube.com/watch?v=aOheFNXcIRI\" rel=\"nofollow\"\u003ehere is the video I used to get this far\u003c/a\u003e\u003c/p\u003e","answer_count":"0","comment_count":"6","creation_date":"2016-04-03 19:18:47.73 UTC","last_activity_date":"2016-04-04 07:22:24.42 UTC","last_edit_date":"2016-04-04 07:22:24.42 UTC","last_editor_display_name":"","last_editor_user_id":"3857942","owner_display_name":"","owner_user_id":"6153010","post_type_id":"1","score":"0","tags":"assembly|x86|reverse|masm|irvine32","view_count":"242"}
During my build-process I also trigger a \u003ccode\u003evagrant up\u003c/code\u003e to setup a VM for phpunit tests:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$ vagrant up\n$ ./runtest.sh\n$ vagrant suspend\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNow when I re-build the project, the VM gets build as a new one instead of just resuming the previous one. I guess this is because of the cleanup plugin removing the \u003ccode\u003e.vagrant\u003c/code\u003e-directory, therefore making Vagrant think it should build a new machine instead of just resuming the previous one.\u003c/p\u003e\n\n\u003cp\u003eNow I have configured the plugin to exclude the following patterns and I have the \u003cem\u003e'Apply pattern also on directories'\u003c/em\u003e-checkbox also checked:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e**/*.vagrant\n.vagrant\n.vagrant/\n./.vagrant\n./.vagrant/\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut still the \u003ccode\u003e.vagrant\u003c/code\u003e-directory gets deleted from the workspace on each new build, spawning a brand new VM each time...\u003c/p\u003e\n\n\u003cp\u003eDoes anyone know how I can exclude the \u003ccode\u003e.vagrant\u003c/code\u003e-directory from the workspace cleanup plugin?\u003c/p\u003e","accepted_answer_id":"24491642","answer_count":"2","comment_count":"0","creation_date":"2014-06-27 08:21:39.93 UTC","favorite_count":"1","last_activity_date":"2014-06-30 13:48:12.2 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1471590","post_type_id":"1","score":"2","tags":"jenkins|vagrant","view_count":"1974"} {"id":"27662721","title":"Removing a menu from a wxPython MenuBar","body":"\u003cp\u003eI have created a MenuBar and appended a number of Menu objects to it.\u003c/p\u003e\n\n\u003cp\u003eI now want to remove one of the menus, having only reference to the Menu object appended, and not knowing or caring about the positioning and labeling of the 
menus.\u003c/p\u003e\n\n\u003cp\u003eThis seems like a trivial thing to do, but the API does not seem built to handle it; all methods are based on positions and labels. \u003c/p\u003e\n\n\u003cp\u003eThere exists a MenuBar.Remove() method, but it takes the position as argument. No method takes a menu and returns its position.\u003c/p\u003e\n\n\u003cp\u003eThe Detach() methods on Menus and MenuBars are undocumented and apparently do nothing.\u003c/p\u003e\n\n\u003cp\u003eI am sure this is a dumb question and that the solution is obvious, given that no one I could find have asked it before, but the solution eludes me.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2014-12-26 22:42:46.25 UTC","last_activity_date":"2015-01-05 06:31:55.023 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3478688","post_type_id":"1","score":"0","tags":"python|wxpython|menubar","view_count":"262"} {"id":"47344309","title":"Why cant my mobile recognize a valid answer delimited by: response validation \u003eregular expression \u003ematches","body":"\u003cp\u003eI'm sorry, English is not my first language, I'll try to make myself clear.\u003c/p\u003e\n\n\u003cp\u003eI have a field for email, if the email entered matches with one of my list you may continue. 
So, I restricted the field as shown in the image.\u003c/p\u003e\n\n\u003cp\u003eresponse validation :\u003c/p\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/a27J9.jpg\" alt=\"response validation\"\u003e\u003c/p\u003e\n\n\u003cp\u003eEvery Email from my list is in the restriction separated by \"|\" as follows:\u003c/p\u003e\n\n\u003cblockquote class=\"spoiler\"\u003e\n \u003cp\u003e gchimail@yahoo.com.mx|timail2@hotmail.com|alemail@hotmail.com|ale_gmail@hotmail.com|almajimal@hotmail.com....etc...\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eIt works perfectly in any computer, but it fails in SOME MOBILES, so they cannot access the Form from their devices.\u003c/p\u003e\n\n\u003cp\u003eDo you have any clue of what might be going on with those mobiles???\nI really need to solve this, we use this form weekly.\u003c/p\u003e\n\n\u003cp\u003eThanks in advance.\u003c/p\u003e","answer_count":"0","comment_count":"1","creation_date":"2017-11-17 06:09:46.64 UTC","favorite_count":"1","last_activity_date":"2017-11-17 07:25:12.843 UTC","last_edit_date":"2017-11-17 07:25:12.843 UTC","last_editor_display_name":"","last_editor_user_id":"8404453","owner_display_name":"","owner_user_id":"8955816","post_type_id":"1","score":"0","tags":"regex|validation|mobile|google-form|smartphone","view_count":"35"} @@ -3593,7 +3593,7 @@ {"id":"21615376","title":"Main user for app - best implementation","body":"\u003cp\u003eI'm creating an app, it is looks like Twitter app, UITabBarController + UINavigationController.\u003c/p\u003e\n\n\u003cp\u003eIn my app i need to have one main user (e.g. \u003cstrong\u003eYOU\u003c/strong\u003e) and other users, \u003cstrong\u003eYOU\u003c/strong\u003e and users sharing same structure i.e. class.\u003c/p\u003e\n\n\u003cp\u003eWith bunch of ordinary users i don't see a problem - you creating user object, using it, ARC doing its job. 
\u003c/p\u003e\n\n\u003cp\u003eMy question is: what is best way to save \u003cstrong\u003eYOUrs\u003c/strong\u003e data, access or pass it between tabs(and tabs can be selected in no particular order).\u003c/p\u003e\n\n\u003cp\u003eI already read many q\u0026amp;a about this topic, and I can use:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003eglobal var;\u003c/li\u003e\n\u003cli\u003eclass method (+);\u003c/li\u003e\n\u003cli\u003esingleton;\u003c/li\u003e\n\u003cli\u003eNSUserDefaults;\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003ebut, i don't think i really know right and \u003cstrong\u003eBEST\u003c/strong\u003e answer.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEdit:\u003c/strong\u003e\nSorry if i wrote unclear, i'll try to explain better. \nWhen you start app, you need to login, after login you can talk to other users, make tasks for them etc.\nThere is really \u003cstrong\u003eone user\u003c/strong\u003e i want to store data for, because when i switch between tabs i need to know my id and other things. But other users i talk to is actually the same in terms of class design. 
They all need to have name, avatar, email, etc.\u003c/p\u003e","accepted_answer_id":"21625469","answer_count":"1","comment_count":"3","creation_date":"2014-02-06 22:24:22.04 UTC","last_activity_date":"2014-02-08 11:45:04.083 UTC","last_edit_date":"2014-02-07 07:37:34.78 UTC","last_editor_display_name":"","last_editor_user_id":"274390","owner_display_name":"","owner_user_id":"274390","post_type_id":"1","score":"-4","tags":"ios|objective-c|singleton|uitabbarcontroller","view_count":"56"} {"id":"20840357","title":"Execute jar file using hadoop","body":"\u003cp\u003eI want to execute a jar file which works fine upon executing from command line:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ejava -Xmx3g -jar jarname.jar -T class_name_in_jar -R filename1 -I filename2 -known filename3 -o filename4\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAbove command executes *class_name_in_jar* by taking input filename1, filename2 and filename3. it will generate output in filename4.\u003c/p\u003e\n\n\u003cp\u003eHere is my map reduce program:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport java.io.IOException;\n import java.util.*;\n import org.apache.hadoop.fs.Path;\n import org.apache.hadoop.conf.*;\n import org.apache.hadoop.io.*;\n import org.apache.hadoop.mapred.*;\n import org.apache.hadoop.util.*;\n\n public class GatkWordCount {\n\n public static class Reduce extends MapReduceBase implements Reducer\u0026lt;Text, IntWritable, Text, IntWritable\u0026gt; {\n public void reduce(Text key, Iterator\u0026lt;IntWritable\u0026gt; values, OutputCollector\u0026lt;Text, IntWritable\u0026gt; output, Reporter reporter) throws IOException {\n String find_targets_cmd = \"java -Xmx3g -jar \u0026lt;jarname\u0026gt;.jar -T \u0026lt;class name in jar\u0026gt; -R \u0026lt;filename1\u0026gt; -I \u0026lt;filename2\u0026gt; -known \u0026lt;filename3\u0026gt; -o \u0026lt;filename4\u0026gt;\";\n\n exceptionOnError(execAndReconnect(find_targets_cmd));\n }\n }\n\n public static int 
execAndReconnect(String cmd) throws IOException {\n Process p = Runtime.getRuntime().exec(cmd);\n p.waitFor();\n return p.exitValue();\n }\n\n public static void exceptionOnError(int errorCode) throws IOException{\n if(0 != errorCode)\n throw new IOException(String.valueOf(errorCode));\n }\n\n public static void main(String[] args) throws Exception {\n JobConf conf = new JobConf(GatkWordCount.class);\n conf.setJobName(\"GatkWordCount\");\n\n conf.setOutputKeyClass(Text.class);\n conf.setOutputValueClass(IntWritable.class);\n\n conf.setReducerClass(Reduce.class);\n\n conf.setInputFormat(TextInputFormat.class);\n conf.setOutputFormat(TextOutputFormat.class);\n\n FileInputFormat.setInputPaths(conf, new Path(args[0]));\n FileOutputFormat.setOutputPath(conf, new Path(args[1]));\n\n JobClient.runJob(conf);\n }\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003chr\u003e\n\n\u003cp\u003eIn \u003ccode\u003eHDFS\u003c/code\u003e, I have put all the required input files.\nI have executed below command:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e enter code herehadoop/bin/hadoop jar gatkword.jar GatkWordCount /user/hduser/gatkinput/gatkinput/group.bam /user/hduser/gatkword2\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBelow is the error message am getting after executing above command:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e13/12/29 17:58:59 WARN mapred.JobClient: Use GenericOptionsParser for parsing the arguments. 
Applications should implement Tool for the same.\n13/12/29 17:58:59 INFO util.NativeCodeLoader: Loaded the native-hadoop library\n13/12/29 17:58:59 WARN snappy.LoadSnappy: Snappy native library not loaded\n13/12/29 17:58:59 INFO mapred.FileInputFormat: Total input paths to process : 1\n13/12/29 17:58:59 INFO mapred.JobClient: Running job: job_201312261425_0013\n13/12/29 17:59:00 INFO mapred.JobClient: map 0% reduce 0%\n13/12/29 17:59:06 INFO mapred.JobClient: Task Id : attempt_201312261425_0013_m_000000_0, Status : FAILED\njava.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable\n at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1014)\n at org.apache.hadoop.mapred.MapTask$OldOutputCollector.collect(MapTask.java:592)\n at org.apache.hadoop.mapred.lib.IdentityMapper.map(IdentityMapper.java:38)\n at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)\n at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:436)\n at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)\n at org.apache.hadoop.mapred.Child$4.run(Child.java:255)\n at java.security.AccessController.doPrivileged(Native Method)\n at javax.security.auth.Subject.doAs(Subject.java:415)\n at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1136)\n at org.apache.hadoop.mapred.Child.main(Child.java:249)\n\n13/12/29 17:59:06 INFO mapred.JobClient: Task Id : attempt_201312261425_0013_m_000001_0, Status : FAILED\njava.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable\n at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1014)\n at org.apache.hadoop.mapred.MapTask$OldOutputCollector.collect(MapTask.java:592)\n at org.apache.hadoop.mapred.lib.IdentityMapper.map(IdentityMapper.java:38)\n at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)\n at 
org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:436)\n at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)\n at org.apache.hadoop.mapred.Child$4.run(Child.java:255)\n at java.security.AccessController.doPrivileged(Native Method)\n at javax.security.auth.Subject.doAs(Subject.java:415)\n at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1136)\n at org.apache.hadoop.mapred.Child.main(Child.java:249)\n\n13/12/29 17:59:11 INFO mapred.JobClient: Task Id : attempt_201312261425_0013_m_000000_1, Status : FAILED\njava.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable\n at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1014)\n at org.apache.hadoop.mapred.MapTask$OldOutputCollector.collect(MapTask.java:592)\n at org.apache.hadoop.mapred.lib.IdentityMapper.map(IdentityMapper.java:38)\n at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)\n at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:436)\n at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)\n at org.apache.hadoop.mapred.Child$4.run(Child.java:255)\n at java.security.AccessController.doPrivileged(Native Method)\n at javax.security.auth.Subject.doAs(Subject.java:415)\n at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1136)\n at org.apache.hadoop.mapred.Child.main(Child.java:249)\n\n13/12/29 17:59:11 INFO mapred.JobClient: Task Id : attempt_201312261425_0013_m_000001_1, Status : FAILED\njava.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable\n at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1014)\n at org.apache.hadoop.mapred.MapTask$OldOutputCollector.collect(MapTask.java:592)\n at org.apache.hadoop.mapred.lib.IdentityMapper.map(IdentityMapper.java:38)\n at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)\n at 
org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:436)\n at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)\n at org.apache.hadoop.mapred.Child$4.run(Child.java:255)\n at java.security.AccessController.doPrivileged(Native Method)\n at javax.security.auth.Subject.doAs(Subject.java:415)\n at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1136)\n at org.apache.hadoop.mapred.Child.main(Child.java:249)\n\n13/12/29 17:59:17 INFO mapred.JobClient: Task Id : attempt_201312261425_0013_m_000000_2, Status : FAILED\njava.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable\n at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1014)\n at org.apache.hadoop.mapred.MapTask$OldOutputCollector.collect(MapTask.java:592)\n at org.apache.hadoop.mapred.lib.IdentityMapper.map(IdentityMapper.java:38)\n at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)\n at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:436)\n at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)\n at org.apache.hadoop.mapred.Child$4.run(Child.java:255)\n at java.security.AccessController.doPrivileged(Native Method)\n at javax.security.auth.Subject.doAs(Subject.java:415)\n at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1136)\n at org.apache.hadoop.mapred.Child.main(Child.java:249)\n\n13/12/29 17:59:17 INFO mapred.JobClient: Task Id : attempt_201312261425_0013_m_000001_2, Status : FAILED\njava.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org.apache.hadoop.io.LongWritable\n at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1014)\n at org.apache.hadoop.mapred.MapTask$OldOutputCollector.collect(MapTask.java:592)\n at org.apache.hadoop.mapred.lib.IdentityMapper.map(IdentityMapper.java:38)\n at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:50)\n at 
org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:436)\n at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)\n at org.apache.hadoop.mapred.Child$4.run(Child.java:255)\n at java.security.AccessController.doPrivileged(Native Method)\n at javax.security.auth.Subject.doAs(Subject.java:415)\n at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1136)\n at org.apache.hadoop.mapred.Child.main(Child.java:249)\n\n13/12/29 17:59:22 INFO mapred.JobClient: Job complete: job_201312261425_0013\n13/12/29 17:59:22 INFO mapred.JobClient: Counters: 7\n13/12/29 17:59:22 INFO mapred.JobClient: Job Counters \n13/12/29 17:59:22 INFO mapred.JobClient: SLOTS_MILLIS_MAPS=42572\n13/12/29 17:59:22 INFO mapred.JobClient: Total time spent by all reduces waiting after reserving slots (ms)=0\n13/12/29 17:59:22 INFO mapred.JobClient: Total time spent by all maps waiting after reserving slots (ms)=0\n13/12/29 17:59:22 INFO mapred.JobClient: Launched map tasks=8\n13/12/29 17:59:22 INFO mapred.JobClient: Data-local map tasks=8\n13/12/29 17:59:22 INFO mapred.JobClient: SLOTS_MILLIS_REDUCES=0\n13/12/29 17:59:22 INFO mapred.JobClient: Failed map tasks=1\n13/12/29 17:59:22 INFO mapred.JobClient: Job Failed: # of failed Map Tasks exceeded allowed limit. FailedCount: 1. 
LastFailedTask: task_201312261425_0013_m_000000\nException in thread \"main\" java.io.IOException: Job failed!\n at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:1327)\n at GatkWordCount.main(GatkWordCount.java:51)\n at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\n at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n at java.lang.reflect.Method.invoke(Method.java:601)\n at org.apache.hadoop.util.RunJar.main(RunJar.java:156)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ePlease suggest what needs to be change in my code in order to execute it properly. Thanks for your help.\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2013-12-30 13:16:31.563 UTC","last_activity_date":"2014-01-02 11:58:45.317 UTC","last_edit_date":"2014-01-02 10:37:02.367 UTC","last_editor_display_name":"","last_editor_user_id":"3146435","owner_display_name":"","owner_user_id":"3146435","post_type_id":"1","score":"0","tags":"hadoop|mapreduce","view_count":"825"} {"id":"27166855","title":"Using PyCharm Professional and Vagrant, how do I run a Django server?","body":"\u003cp\u003eI have set up already my configuration so that it will run the server remotely. 
When I click run, I see the command used:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003essh://vagrant@localhost:2222/usr/bin/python -u \"C:/Users/MyName/ProjectName/config/manage.py\" runserver localhost:8080\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e(I've replaced the directory names for anonymity reasons).\u003c/p\u003e\n\n\u003cp\u003eWhen I do run this, It fails (obviously) because it's using a windows path to my manage.py\nSpecifically the error I get is\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e`/usr/bin/python: can't open file 'C:/Users/MyName/judgeapps/config/manage.py': [Errno 2] No such file or directory\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhat I can't figure out after extensive googling, is how to force django to use a path on my vagrant machine. How can I go about doing this?\u003c/p\u003e","accepted_answer_id":"27166947","answer_count":"1","comment_count":"0","creation_date":"2014-11-27 09:14:27.55 UTC","favorite_count":"4","last_activity_date":"2014-11-27 09:58:55.803 UTC","last_edit_date":"2014-11-27 09:39:05.893 UTC","last_editor_display_name":"","last_editor_user_id":"1318181","owner_display_name":"","owner_user_id":"3286955","post_type_id":"1","score":"4","tags":"python|django|vagrant|pycharm","view_count":"3347"} -{"id":"28861732","title":"Best practices for field names in ElasticSearch","body":"\u003cp\u003eI'm looking at simple ways of defining, in Java, some lightweight service and data access layers on top of ElasticSearch. My POJO data objects will naturally have property names in camelCase, but I'm wondering if I should use camelCase for the field names in the ElasticSearch type mappings. In the data repository world, and in particular in traditional RDBMS, field names are definitely not camel cased. If I'm not mistaken, there seems to be a trend in the NoSql world to use underscores in field names, e.g. first_name. 
Is this a common practice for ElasticSearch ?\nIf so, does this mean I have to configure a Jackson based conversion service that is able to map back and forth between the camelCase and underscored field names ?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2015-03-04 18:08:39.68 UTC","favorite_count":"2","last_activity_date":"2015-03-05 03:16:56.85 UTC","last_edit_date":"2015-03-05 03:16:56.85 UTC","last_editor_display_name":"","last_editor_user_id":"1797393","owner_display_name":"","owner_user_id":"269418","post_type_id":"1","score":"4","tags":"java|elasticsearch|naming-conventions","view_count":"2283"} +{"id":"28861732","title":"Best practices for field names in OpenSearch","body":"\u003cp\u003eI'm looking at simple ways of defining, in Java, some lightweight service and data access layers on top of OpenSearch. My POJO data objects will naturally have property names in camelCase, but I'm wondering if I should use camelCase for the field names in the OpenSearch type mappings. In the data repository world, and in particular in traditional RDBMS, field names are definitely not camel cased. If I'm not mistaken, there seems to be a trend in the NoSql world to use underscores in field names, e.g. first_name. 
Is this a common practice for OpenSearch ?\nIf so, does this mean I have to configure a Jackson based conversion service that is able to map back and forth between the camelCase and underscored field names ?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2015-03-04 18:08:39.68 UTC","favorite_count":"2","last_activity_date":"2015-03-05 03:16:56.85 UTC","last_edit_date":"2015-03-05 03:16:56.85 UTC","last_editor_display_name":"","last_editor_user_id":"1797393","owner_display_name":"","owner_user_id":"269418","post_type_id":"1","score":"4","tags":"java|opensearch|naming-conventions","view_count":"2283"} {"id":"4638709","title":"Best approach to dynamically filter .Net objects","body":"\u003cp\u003eThe project I'm working currently on has a way to define a filter on objects from a database.\u003cbr\u003e\nThis filter is a pretty straightforward class containing criteria that will be combined to produce a SQL \u003ccode\u003ewhere\u003c/code\u003e clause. \u003c/p\u003e\n\n\u003cp\u003eThe goal now is to use this class to filter .Net objects as well. So for example the filter might specify that the title property of the object that it is applied to must contain some user-defined string.\u003c/p\u003e\n\n\u003cp\u003eWhat are ways to approach this problem? What should the filter return instead of the sql where-clause and how can it be applied to the object? I've been think about this for hours and don´t yet have even a slight idea how to solve this. 
Been thinking about reflection, dynamic code execution, building expressions but still haven´t found a starting point.\u003c/p\u003e","accepted_answer_id":"4638736","answer_count":"2","comment_count":"4","creation_date":"2011-01-09 10:35:58.653 UTC","last_activity_date":"2011-01-09 11:08:08.72 UTC","last_edit_date":"2011-01-09 11:08:08.72 UTC","last_editor_display_name":"","last_editor_user_id":"1060","owner_display_name":"","owner_user_id":"568697","post_type_id":"1","score":"2","tags":"c#|design|dynamic|filtering","view_count":"930"} {"id":"18994357","title":"Android: Custom Surface View crashing upon orientation change","body":"\u003cp\u003eI was reading \u003ca href=\"http://android-er.blogspot.com/2010/05/another-exercise-of-surfaceview-in.html\" rel=\"nofollow\"\u003ethis tutorial\u003c/a\u003e on how to use a custom surface view class in an XML layout and when I ran the code, the app crashed when my phone's orientation changed. I've noticed a lot of examples involving custom threads and surface view subclasses crashing when the orientation changes, does anybody have any idea why this is happening?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e09-25 18:19:40.639: E/Trace(4982): error opening trace file: No such file or directory (2)\n09-25 18:19:40.639: D/ActivityThread(4982): setTargetHeapUtilization:0.25\n09-25 18:19:40.639: D/ActivityThread(4982): setTargetHeapIdealFree:8388608\n09-25 18:19:40.639: D/ActivityThread(4982): setTargetHeapConcurrentStart:2097152\n09-25 18:19:40.959: D/libEGL(4982): loaded /system/lib/egl/libEGL_adreno200.so\n09-25 18:19:40.979: D/libEGL(4982): loaded /system/lib/egl/libGLESv1_CM_adreno200.so\n09-25 18:19:40.979: D/libEGL(4982): loaded /system/lib/egl/libGLESv2_adreno200.so\n09-25 18:19:41.049: I/Adreno200-EGLSUB(4982): \u0026lt;ConfigWindowMatch:2087\u0026gt;: Format RGBA_8888.\n09-25 18:19:41.099: E/(4982): \u0026lt;s3dReadConfigFile:75\u0026gt;: Can't open file for reading\n09-25 18:19:41.099: E/(4982): 
\u0026lt;s3dReadConfigFile:75\u0026gt;: Can't open file for reading\n09-25 18:19:41.099: D/OpenGLRenderer(4982): Enabling debug mode 0\n09-25 18:19:58.127: W/dalvikvm(4982): threadid=11: thread exiting with uncaught exception (group=0x40d46438)\n09-25 18:19:58.147: E/AndroidRuntime(4982): FATAL EXCEPTION: Thread-156822\n09-25 18:19:58.147: E/AndroidRuntime(4982): java.lang.NullPointerException\n09-25 18:19:58.147: E/AndroidRuntime(4982): at com.example.practicesurface.MySurfaceView.onDraw(MySurfaceView.java:129)\n09-25 18:19:58.147: E/AndroidRuntime(4982): at com.example.practicesurface.MySurfaceView$MySurfaceThread.run(MySurfaceView.java:39)\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"18994627","answer_count":"1","comment_count":"0","creation_date":"2013-09-25 00:37:44.067 UTC","last_activity_date":"2013-09-26 01:22:34.247 UTC","last_edit_date":"2013-09-26 01:22:34.247 UTC","last_editor_display_name":"user2563044","owner_display_name":"user2563044","post_type_id":"1","score":"0","tags":"android|xml|surfaceview","view_count":"449"} {"id":"300472","title":"What does the gnuwin32 program: [.exe do?","body":"\u003cp\u003eLooking in the gnuwin32/bin directory, there is an odd-looking program file named \u003ccode\u003e[.exe\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003eI couldn't find it in the documentation, gnuwin32.sourceforge.net or in a google search, so I ran it and got:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$ [\n[: missing `]'\n$\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eso I gave it ] as a parameter and got\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$ [ ]\n\n$\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIt didn't complain, so I assumed it was on the right track. I tried:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$ [ hello ]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eagain, no complaints. 
so I tried an arithmetic expression:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$ [ 1 + 1 ]\n[: +: binary operator expected\n$\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI tried a bunch of different combinations, including prefix \u0026amp; postfix notation but nothing seemed to work. What does this thing do?\u003c/p\u003e","accepted_answer_id":"300508","answer_count":"3","comment_count":"0","creation_date":"2008-11-18 22:58:56.793 UTC","last_activity_date":"2008-11-19 10:50:50.71 UTC","last_editor_display_name":"","owner_display_name":"Ferruccio","owner_user_id":"4086","post_type_id":"1","score":"3","tags":"gnu|gnuwin32","view_count":"890"} @@ -3822,7 +3822,7 @@ {"id":"1560188","title":"php dynamic checkboxes","body":"\u003cp\u003eCurrently I have a form that submits an image with textfields such as\ntitle, description and another field that autoincrements for imageID, another\narea for the actual file , called vfile, and *** another part that has\n3 checkboxes and a text field.\nEverything works fine, and this is what it does. Submits the data to a database so that it can pull the information to a page on the website.\nThe only part I am trying to update is:\nThe 3 checkboxes and the textfield.\nLets say the first checkbox reads: Apples\nThe second : Oranges\nThe Third: Grapes\nAnd in the other category is a blank textfield that if you add something, it would add it to a category called \"Other\".\u003c/p\u003e\n\n\u003cp\u003eSo the database design has 4 fields: 1 - apples, 2 - oranges, 3 - grapes, 4 - other.\u003c/p\u003e\n\n\u003cp\u003eWhen I click a checkbox, it would add checked to the database under the correct one, either apples, oranges, or grapes.\nIf I add a field to the textbox such as: Bannanas, then it would add \"Bannanas\" to the database field vother and show that in the database.\u003c/p\u003e\n\n\u003cp\u003eThis is all fine, but what if the next picture has all 4 items, plus another one? 
Such as if the next picture had Apples, Oranges, Grapes, Bannanas, and Plums?\u003c/p\u003e\n\n\u003cp\u003eHow could I have the \"Bannanas\" other category, change into a checkbox category that could be chosen for the next pics when I go to the add images page next time. \nSo that when I go to the second picture to submit, it would give me the option of not just 3 checkboxes, but 4 checkboxes now, that I could check the first 4, \"Apples, Oranges, Grapes, Bannanas\" and then put Plums in the other category.\u003c/p\u003e\n\n\u003cp\u003eBasically upon submit it takes what is in the other feild and addes a new category to the database, which is then displayed in the array of checkbox choices and it is removed from the Other Category now, for it is a checkbox. (thus it would not want the value left in the old field, for it would keep creating the same category over and rewriting the old data possibly.\u003c/p\u003e\n\n\u003cp\u003eAnyway, any suggestions?\nThanks in advance. \u003c/p\u003e","answer_count":"3","comment_count":"1","creation_date":"2009-10-13 13:15:23.28 UTC","favorite_count":"0","last_activity_date":"2009-10-13 16:16:14.177 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"162735","post_type_id":"1","score":"1","tags":"php|sql","view_count":"378"} {"id":"43264773","title":"PIL: DLL load failed: specified procedure could not be found","body":"\u003cp\u003eI've been beginning to work with images in Python and I wanted to start using PIL (Pillow). To install it, I ran \u003ccode\u003epip install Pillow\u003c/code\u003e. When installing, PIL was not previously installed. I also tried uninstalling it and reinstalling it, as well as using \u003ccode\u003epip3 install Pillow\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eWhen I run it in Python, my first line is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eFile \"C:\\Program Files\\Python36\\lib\\site-packages\\PIL\\Image.py\", line 56, in \u0026lt;module\u0026gt;\nfrom . 
import _imaging as core\nImportError: DLL load failed: The specified procedure could not be found.\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI checked the directory, and the file _imaging.cp36-win_amd64.pyd is present under the PIL folder.\u003c/p\u003e\n\n\u003cp\u003eWhy is this happening if the needed DLL is there? How can I fix it?\u003c/p\u003e","accepted_answer_id":"43294088","answer_count":"5","comment_count":"5","creation_date":"2017-04-06 20:01:17.05 UTC","favorite_count":"2","last_activity_date":"2017-05-01 16:58:01.777 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4780330","post_type_id":"1","score":"9","tags":"python|python-imaging-library|pillow","view_count":"6589"} {"id":"17143021","title":"How to install properly the latest version of CoffeeScript on Ubuntu (12.04)","body":"\u003cp\u003eHow to install step by step the latest version of CoffeeScript on Ubuntu 12.04.\u003c/p\u003e\n\n\u003cp\u003eThe current version of CoffeeScript is 1.6.3\u003c/p\u003e\n\n\u003cp\u003eAny comments are be very useful.\u003c/p\u003e","accepted_answer_id":"17145118","answer_count":"3","comment_count":"0","creation_date":"2013-06-17 08:11:35.667 UTC","favorite_count":"2","last_activity_date":"2015-12-07 05:40:01.553 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1977012","post_type_id":"1","score":"7","tags":"javascript|node.js|coffeescript|ubuntu-12.04|npm","view_count":"9881"} -{"id":"17825941","title":"Changing the rate at which a method is called","body":"\u003cp\u003eI'm still new to Objective C, and I feel this might be a probably a basic concept I don't know. \u003c/p\u003e\n\n\u003cp\u003eI'm working with OpenGL and I have the method GLKView of the viewcontroller, is called when \"the view needs to be updated\". From this method, I call another method, but I don't want the second method to be called at a rate that I specify. \u003c/p\u003e\n\n\u003cp\u003eHow would I go about accomplishing this? 
I understand that viewcontroller.preferredFramesPerSecond can be set, but I only want this ONE specific method to work on a different timer..\u003c/p\u003e\n\n\u003cp\u003eIs this even the right way of going about this?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2013-07-24 05:33:26.31 UTC","last_activity_date":"2013-07-24 07:28:25.687 UTC","last_edit_date":"2013-07-24 06:13:18.51 UTC","last_editor_display_name":"","last_editor_user_id":"72882","owner_display_name":"","owner_user_id":"2577959","post_type_id":"1","score":"0","tags":"iphone|objective-c|opengl-es","view_count":"64"} +{"id":"17825941","title":"Changing the rate at which a method is called","body":"\u003cp\u003eI'm still new to Objective C, and I feel this might be a probably a basic concept I don't know. \u003c/p\u003e\n\n\u003cp\u003eI'm working with OpenGL and I have the method GLKView of the viewcontroller, is called when \"the view needs to be updated\". From this method, I call another method, but I don't want the second method to be called at a rate that I specify. \u003c/p\u003e\n\n\u003cp\u003eHow would I go about accomplishing this? I understand that viewcontroller.preferredFramesPerSecond can be set, but I only want this ONE specific method to work on a different timer..\u003c/p\u003e\n\n\u003cp\u003eIs this even the right way of going about this?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2013-07-24 05:33:26.31 UTC","last_activity_date":"2013-07-24 07:28:25.687 UTC","last_edit_date":"2013-07-24 06:13:18.51 UTC","last_editor_display_name":"","last_editor_user_id":"72882","owner_display_name":"","owner_user_id":"2577959","post_type_id":"1","score":"0","tags":"iphone|objective-c|opengl-opensearch","view_count":"64"} {"id":"10663571","title":"How can I make one language to other language font converter Using javascript?","body":"\u003cp\u003eI want to make a converter using javascript which will convert one font to other. 
\nExample: If I type \"A\" to a form then the other font will display \"আ\".\u003c/p\u003e","answer_count":"2","comment_count":"4","creation_date":"2012-05-19 08:17:12.8 UTC","last_activity_date":"2012-05-19 08:35:42.113 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1404820","post_type_id":"1","score":"-1","tags":"javascript|html","view_count":"198"} {"id":"40970898","title":"Assigning a Target for UISegmentedControl Swift 3","body":"\u003cp\u003eI have a \u003ccode\u003eUISegmentedControl\u003c/code\u003e that aims to toggle between 3 types of map views \"Standard\", \".Hybrid\", and \"Satellite\". I am getting the following error on the line \".addTarget\" Line.\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003e\"Editor placeholder in source file\"\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cpre\u003e\u003ccode\u003e let segmentedControl = UISegmentedControl(items: [\"Standard\", \"Hybrid\", \"Satellite\"])\n segmentedControl.backgroundColor = UIColor.white.withAlphaComponent(0.5)\n segmentedControl.selectedSegmentIndex = 0\n\n // EVENT LISTENER FOR SEGMENT CONTROL\n segmentedControl.addTarget(self, action: \"mapTypeChanged:\", for: .valueChanged)\n\n func mapTypeChanged(segControl: UISegmentedControl){\n switch segControl.selectedSegmentIndex{\n case 0:\n mapView.mapType = .standard\n case 1:\n mapView.mapType = .hybrid\n case 2:\n mapView.mapType = .satellite\n default:\n break\n }\n\n }\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"2","comment_count":"0","creation_date":"2016-12-05 09:24:16.063 UTC","last_activity_date":"2016-12-05 09:35:37.633 UTC","last_edit_date":"2016-12-05 09:35:37.633 UTC","last_editor_display_name":"","last_editor_user_id":"5044042","owner_display_name":"","owner_user_id":"4650787","post_type_id":"1","score":"-1","tags":"swift3","view_count":"1858"} {"id":"40786247","title":"Share session between multiple WKWebView","body":"\u003cp\u003eI need to implement billing logic. 
It does a few redirects and then opens the new frame in the new window – that's how it works on the web-browser.\u003c/p\u003e\n\n\u003cp\u003eI'm showing the billing frame in the WKWebView. I catch the moment when it wants to open the new frame (navigationAction.targetFrame.isNil) and ask webView to load new request. New frame is loading, but some redirects aren't happening and billing shows me an error. Looks like the session is lost.\u003c/p\u003e\n\n\u003cp\u003eI tried another way: load new request in the new webView. When I initialize the webView I pass the processPull from the previous one, following this article: \u003ca href=\"https://github.com/ShingoFukuyama/WKWebViewTips#cookie-sharing-between-multiple-wkwebviews\" rel=\"nofollow noreferrer\"\u003ehttps://github.com/ShingoFukuyama/WKWebViewTips#cookie-sharing-between-multiple-wkwebviews\u003c/a\u003e Problem wasn't solve.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elazy var webView: WKWebView = { [unowned self] in\n let preferences = WKPreferences()\n preferences.javaScriptEnabled = true\n preferences.javaScriptCanOpenWindowsAutomatically = true\n\n let configuration = WKWebViewConfiguration()\n configuration.preferences = preferences\n\n let webView = WKWebView(frame: CGRect.zero, configuration: configuration)\n webView.navigationDelegate = self\n webView.UIDelegate = self\n webView.estimatedProgress\n webView.scrollView.backgroundColor = UIColor.binomoDarkGrey()\n self.view.addSubview(webView)\n webView.snp_makeConstraints { [unowned self] (make) in\n make.edges.equalTo(self.view)\n }\n\n return webView\n}()\n\n// MARK: WKNavigationDelegate\n\nfunc webView(webView: WKWebView, decidePolicyForNavigationAction navigationAction: WKNavigationAction, decisionHandler: (WKNavigationActionPolicy) -\u0026gt; Void) {\n if navigationAction.targetFrame.isNil {\n decisionHandler(.Cancel)\n webView.loadRequest(navigationAction.request)\n } else {\n decisionHandler(.Allow)\n 
}\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"2","creation_date":"2016-11-24 12:18:29.933 UTC","last_activity_date":"2017-05-06 08:48:42.5 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1292399","post_type_id":"1","score":"0","tags":"ios|swift|wkwebview|wkwebviewconfiguration","view_count":"689"} @@ -4293,7 +4293,7 @@ {"id":"30645751","title":"API Testing Using SoapUI vs Postman vs Runscope","body":"\u003cp\u003eI'm new to using applications to test backend APIs. I've always just manually tested using the front-end applications. What I would like to do is to use an app that is designed specifically for backend API testing. So far, I've been directed to SoapUI, Postman, and Runscope. But I'm at a loss as I am more of a test analyst than I am a programmer, despite having experience automated testing in Selenium with JavaScript, Python and Ruby. Any suggestions? Thoughts? Warnings?\u003c/p\u003e\n\n\u003cp\u003e(I posted this to the QA page, too, so sorry for the duplicate question)\u003c/p\u003e","answer_count":"2","comment_count":"2","creation_date":"2015-06-04 13:54:39.017 UTC","favorite_count":"2","last_activity_date":"2017-01-05 06:53:44.55 UTC","last_edit_date":"2015-06-04 13:59:58.567 UTC","last_editor_display_name":"","last_editor_user_id":"4142943","owner_display_name":"","owner_user_id":"4142943","post_type_id":"1","score":"7","tags":"automated-tests|soapui|postman|web-api-testing|runscope","view_count":"10670"} {"id":"41619070","title":"Remove information from Reports and Metrics generated in Testlink","body":"\u003cp\u003eI need to customize the reports generated in Reports and Metrics in Testlink. What I want to do is remove the Execution Type and Estimated exec. Duration (min) of the \"Test Report\" and \"Test Plan Report\"\u003c/p\u003e\n\n\u003cp\u003eSince:\u003c/p\u003e\n\n\u003cp\u003eTestlink is GPL, its repository is available for download and changes. 
The reports in question are generated by the files:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cp\u003eTestlink / dev / apps / testlink-1.9.15 / lib / results /\nprintDocOptions.php\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eTest Report: localhost/testlink/lib/results/printDocument.php?level=testsuite\u0026amp;id=30\u0026amp;type=testplan\u0026amp;docTestPlanId=2\u0026amp;format=0\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eTest Plan Report: localhost/testlink/lib/results/printDocument.php?level=testsuite\u0026amp;id=30\u0026amp;type=testreport\u0026amp;docTestPlanId=2\u0026amp;format=0\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eTestlink / dev / apps / testlink-1.9.15 / lib /\nresults / testlink-1.9.15 / lib / results / printDocument.php\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003eAnd your requires ();\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003e\u003cstrong\u003eI would like to remove from the report the information that is circled in red in the image:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eDrive public image: \u003ca href=\"https://drive.google.com/file/d/0B4B3pCn0kyxOMXF0ZXB2c3I4a28/view?usp=sharing\" rel=\"nofollow noreferrer\"\u003ehttps://drive.google.com/file/d/0B4B3pCn0kyxOMXF0ZXB2c3I4a28/view?usp=sharing\u003c/a\u003e\u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2017-01-12 17:08:44.463 UTC","last_activity_date":"2017-01-12 17:08:44.463 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"6496005","post_type_id":"1","score":"0","tags":"php|report|customization|testlink","view_count":"29"} {"id":"13814085","title":"is there something wrong with the iteration of my array?","body":"\u003cp\u003eit's says that there's no error but the array doesn't have any elements in it\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eNSMutableArray *validMoves = [[NSMutableArray alloc]init];\n\nfor (int i = 0; i \u0026lt; 100; i++)\n{\n [validMoves 
removeAllObjects];\n\n for (TileClass *tilearray in tiles ) \n {\n if ([self blankTile:tilearray] != NONE) \n {\n [validMoves addObject:tilearray];\n }\n\n }\n if (validMoves.count \u0026gt; 0) \n {\n NSInteger pick = arc4random_uniform(validMoves.count);\n [self movePiece:(TileClass *)[validMoves objectAtIndex:pick] withAnimation:NO];\n } \n else \n {\n NSLog(@\"no value\");\n }\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"14","creation_date":"2012-12-11 05:18:01.41 UTC","favorite_count":"1","last_activity_date":"2012-12-11 08:11:18.5 UTC","last_edit_date":"2012-12-11 05:43:13.017 UTC","last_editor_display_name":"","last_editor_user_id":"1434338","owner_display_name":"","owner_user_id":"1884698","post_type_id":"1","score":"0","tags":"objective-c|xcode","view_count":"71"} -{"id":"9807536","title":"MOXy JAXB: how to map several XML tag elements to the same JAVA bean property","body":"\u003cp\u003eI am trying to unmarshall an XML file using MOXy JAXB. I have a set of classes, already generated, and I am using Xpath to map every XML element I need into my model.\u003c/p\u003e\n\n\u003cp\u003eI have an XML file like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;?xml version=\"1.0\" encoding=\"UTF-8\"?\u0026gt;\n\u0026lt;fe:Facturae xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\"\n xmlns:fe=\"http://www.facturae.es/Facturae/2009/v3.2/Facturae\"\u0026gt;\n \u0026lt;Parties\u0026gt;\n \u0026lt;SellerParty\u0026gt;\n \u0026lt;LegalEntity\u0026gt;\n \u0026lt;CorporateName\u0026gt;Company Comp SA\u0026lt;/CorporateName\u0026gt;\n \u0026lt;TradeName\u0026gt;Comp\u0026lt;/TradeName\u0026gt;\n \u0026lt;ContactDetails\u0026gt;\n \u0026lt;Telephone\u0026gt;917776665\u0026lt;/Telephone\u0026gt;\n \u0026lt;TeleFax\u0026gt;917776666\u0026lt;/TeleFax\u0026gt;\n \u0026lt;WebAddress\u0026gt;www.facturae.es\u0026lt;/WebAddress\u0026gt;\n \u0026lt;ElectronicMail\u0026gt;facturae@mityc.es\u0026lt;/ElectronicMail\u0026gt;\n 
\u0026lt;ContactPersons\u0026gt;Fernando\u0026lt;/ContactPersons\u0026gt;\n \u0026lt;CnoCnae\u0026gt;28000\u0026lt;/CnoCnae\u0026gt;\n \u0026lt;INETownCode\u0026gt;2134AAB\u0026lt;/INETownCode\u0026gt;\n \u0026lt;AdditionalContactDetails\u0026gt;Otros datos\u0026lt;/AdditionalContactDetails\u0026gt;\n \u0026lt;/ContactDetails\u0026gt;\n \u0026lt;/LegalEntity\u0026gt;\n \u0026lt;/SellerParty\u0026gt;\n \u0026lt;BuyerParty\u0026gt;\n \u0026lt;Individual\u0026gt;\n \u0026lt;Name\u0026gt;Juana\u0026lt;/Name\u0026gt;\n \u0026lt;FirstSurname\u0026gt;Mauriño\u0026lt;/FirstSurname\u0026gt;\n \u0026lt;OverseasAddress\u0026gt;\n \u0026lt;Address\u0026gt;Juncal 1315\u0026lt;/Address\u0026gt;\n \u0026lt;PostCodeAndTown\u0026gt;00000 Buenos Aires\u0026lt;/PostCodeAndTown\u0026gt;\n \u0026lt;Province\u0026gt;Capital Federal\u0026lt;/Province\u0026gt;\n \u0026lt;CountryCode\u0026gt;ARG\u0026lt;/CountryCode\u0026gt;\n \u0026lt;/OverseasAddress\u0026gt;\n \u0026lt;ContactDetails\u0026gt;\n \u0026lt;Telephone\u0026gt;00547775554\u0026lt;/Telephone\u0026gt;\n \u0026lt;TeleFax\u0026gt;00547775555\u0026lt;/TeleFax\u0026gt;\n \u0026lt;/ContactDetails\u0026gt;\n \u0026lt;/Individual\u0026gt;\n \u0026lt;/BuyerParty\u0026gt;\n \u0026lt;/Parties\u0026gt;\n\u0026lt;/fe:Facturae\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThen I have my model:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e@XmlRootElement(namespace=\"http://www.facturae.es/Facturae/2009/v3.2/Facturae\", name=\"Facturae\")\npublic class Facturae implements BaseObject, SecuredObject, CreationDataAware {\n @XmlPath(\"Parties/SellerParty\")\n private Party sellerParty;\n\n @XmlPath(\"Parties/BuyerParty\")\n private Party buyerParty;\n}\n\npublic class Party implements BaseObject, SecuredObject, CreationDataAware {\n@XmlPath(\"LegalEntity/ContactDetails\")\n private ContactDetails contactDetails;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAs you can see, 
\u003ccode\u003e\u0026lt;ContactDetails\u0026gt;\u0026lt;/ContactDetails\u0026gt;\u003c/code\u003e is present in \u003ccode\u003e\u0026lt;SellerParty\u0026gt;\u0026lt;/SellerParty\u0026gt;\u003c/code\u003e and \u003ccode\u003e\u0026lt;BuyerParty\u0026gt;\u0026lt;/BuyerParty\u0026gt;\u003c/code\u003e but this two tags share the same JAVA object (Party). With the previous mapping (@XmlPath(\"LegalEntity/ContactDetails\")) I can pass correctly the ContactDetails info in SellerParty, but I want also to pass the ContactDetails in \u003ccode\u003e\u0026lt;BuyerParty\u0026gt;\u003c/code\u003e at the same time.\u003c/p\u003e\n\n\u003cp\u003eI was trying something like that:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e@XmlPaths(value = { @XmlPath(\"LegalEntity/ContactDetails\"),@XmlPath(\"Individual/ContactDetails\") })\n private ContactDetails contactDetails;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut it doesn't work.\u003c/p\u003e\n\n\u003cp\u003eCan you guys give me a hand?\u003c/p\u003e\n\n\u003cp\u003eThank you very much.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2012-03-21 15:20:06.62 UTC","last_activity_date":"2012-03-21 17:50:33.817 UTC","last_edit_date":"2012-03-21 15:43:37.31 UTC","last_editor_display_name":"","last_editor_user_id":"383861","owner_display_name":"","owner_user_id":"1281500","post_type_id":"1","score":"1","tags":"jaxb|eclipselink|moxy","view_count":"571"} +{"id":"9807536","title":"MOXy JAXB: how to map several XML tag elements to the same JAVA bean property","body":"\u003cp\u003eI am trying to unmarshall an XML file using MOXy JAXB. 
I have a set of classes, already generated, and I am using Xpath to map every XML element I need into my model.\u003c/p\u003e\n\n\u003cp\u003eI have an XML file like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;?xml version=\"1.0\" encoding=\"UTF-8\"?\u0026gt;\n\u0026lt;fe:Facturae xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\"\n xmlns:fe=\"http://www.facturae.opensearch/Facturae/2009/v3.2/Facturae\"\u0026gt;\n \u0026lt;Parties\u0026gt;\n \u0026lt;SellerParty\u0026gt;\n \u0026lt;LegalEntity\u0026gt;\n \u0026lt;CorporateName\u0026gt;Company Comp SA\u0026lt;/CorporateName\u0026gt;\n \u0026lt;TradeName\u0026gt;Comp\u0026lt;/TradeName\u0026gt;\n \u0026lt;ContactDetails\u0026gt;\n \u0026lt;Telephone\u0026gt;917776665\u0026lt;/Telephone\u0026gt;\n \u0026lt;TeleFax\u0026gt;917776666\u0026lt;/TeleFax\u0026gt;\n \u0026lt;WebAddress\u0026gt;www.facturae.opensearch\u0026lt;/WebAddress\u0026gt;\n \u0026lt;ElectronicMail\u0026gt;facturae@mityc.opensearch\u0026lt;/ElectronicMail\u0026gt;\n \u0026lt;ContactPersons\u0026gt;Fernando\u0026lt;/ContactPersons\u0026gt;\n \u0026lt;CnoCnae\u0026gt;28000\u0026lt;/CnoCnae\u0026gt;\n \u0026lt;INETownCode\u0026gt;2134AAB\u0026lt;/INETownCode\u0026gt;\n \u0026lt;AdditionalContactDetails\u0026gt;Otros datos\u0026lt;/AdditionalContactDetails\u0026gt;\n \u0026lt;/ContactDetails\u0026gt;\n \u0026lt;/LegalEntity\u0026gt;\n \u0026lt;/SellerParty\u0026gt;\n \u0026lt;BuyerParty\u0026gt;\n \u0026lt;Individual\u0026gt;\n \u0026lt;Name\u0026gt;Juana\u0026lt;/Name\u0026gt;\n \u0026lt;FirstSurname\u0026gt;Mauriño\u0026lt;/FirstSurname\u0026gt;\n \u0026lt;OverseasAddress\u0026gt;\n \u0026lt;Address\u0026gt;Juncal 1315\u0026lt;/Address\u0026gt;\n \u0026lt;PostCodeAndTown\u0026gt;00000 Buenos Aires\u0026lt;/PostCodeAndTown\u0026gt;\n \u0026lt;Province\u0026gt;Capital Federal\u0026lt;/Province\u0026gt;\n \u0026lt;CountryCode\u0026gt;ARG\u0026lt;/CountryCode\u0026gt;\n \u0026lt;/OverseasAddress\u0026gt;\n \u0026lt;ContactDetails\u0026gt;\n 
\u0026lt;Telephone\u0026gt;00547775554\u0026lt;/Telephone\u0026gt;\n \u0026lt;TeleFax\u0026gt;00547775555\u0026lt;/TeleFax\u0026gt;\n \u0026lt;/ContactDetails\u0026gt;\n \u0026lt;/Individual\u0026gt;\n \u0026lt;/BuyerParty\u0026gt;\n \u0026lt;/Parties\u0026gt;\n\u0026lt;/fe:Facturae\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThen I have my model:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e@XmlRootElement(namespace=\"http://www.facturae.opensearch/Facturae/2009/v3.2/Facturae\", name=\"Facturae\")\npublic class Facturae implements BaseObject, SecuredObject, CreationDataAware {\n @XmlPath(\"Parties/SellerParty\")\n private Party sellerParty;\n\n @XmlPath(\"Parties/BuyerParty\")\n private Party buyerParty;\n}\n\npublic class Party implements BaseObject, SecuredObject, CreationDataAware {\n@XmlPath(\"LegalEntity/ContactDetails\")\n private ContactDetails contactDetails;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAs you can see, \u003ccode\u003e\u0026lt;ContactDetails\u0026gt;\u0026lt;/ContactDetails\u0026gt;\u003c/code\u003e is present in \u003ccode\u003e\u0026lt;SellerParty\u0026gt;\u0026lt;/SellerParty\u0026gt;\u003c/code\u003e and \u003ccode\u003e\u0026lt;BuyerParty\u0026gt;\u0026lt;/BuyerParty\u0026gt;\u003c/code\u003e but this two tags share the same JAVA object (Party). 
With the previous mapping (@XmlPath(\"LegalEntity/ContactDetails\")) I can pass correctly the ContactDetails info in SellerParty, but I want also to pass the ContactDetails in \u003ccode\u003e\u0026lt;BuyerParty\u0026gt;\u003c/code\u003e at the same time.\u003c/p\u003e\n\n\u003cp\u003eI was trying something like that:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e@XmlPaths(value = { @XmlPath(\"LegalEntity/ContactDetails\"),@XmlPath(\"Individual/ContactDetails\") })\n private ContactDetails contactDetails;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut it doesn't work.\u003c/p\u003e\n\n\u003cp\u003eCan you guys give me a hand?\u003c/p\u003e\n\n\u003cp\u003eThank you very much.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2012-03-21 15:20:06.62 UTC","last_activity_date":"2012-03-21 17:50:33.817 UTC","last_edit_date":"2012-03-21 15:43:37.31 UTC","last_editor_display_name":"","last_editor_user_id":"383861","owner_display_name":"","owner_user_id":"1281500","post_type_id":"1","score":"1","tags":"jaxb|eclipselink|moxy","view_count":"571"} {"id":"2762250","title":"Nullable\u003cT\u003e as a parameter","body":"\u003cp\u003eI alredy have this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic static object GetDBValue(object ObjectEvaluated)\n {\n if (ObjectEvaluated == null)\n return DBNull.Value;\n else\n return ObjectEvaluated;\n }\nused like:\n\n List\u0026lt;SqlParameter\u0026gt; Params = new List\u0026lt;SqlParameter\u0026gt;();\n Params.Add(new SqlParameter(\"@EntityType\", GetDBValue(EntityType)));\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNow i wanted to keep the same interface but extend that to use it with nullable\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e public static object GetDBValue(int? ObjectEvaluated)\n {\n if (ObjectEvaluated.HasValue)\n return ObjectEvaluated.Value;\n else\n return DBNull.Value;\n }\n\n public static object GetDBValue(DateTime? 
ObjectEvaluated)\n {...}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut i want only 1 function GetDBValue for nullables. How do I do that and keep the call as is is? Is that possible at all?\u003c/p\u003e\n\n\u003cp\u003eI can make it work like:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e public static object GetDBValue\u0026lt;T\u0026gt;(Nullable\u0026lt;T\u0026gt; ObjectEvaluated) where T : struct\n {\n if (ObjectEvaluated.HasValue)\n return ObjectEvaluated.Value;\n else\n return DBNull.Value;\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut the call changes to:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eParams.Add(new SqlParameter(\"@EntityID \", GetDBValue\u0026lt;int\u0026gt;(EntityID)));\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"2762275","answer_count":"3","comment_count":"0","creation_date":"2010-05-04 01:25:44.64 UTC","last_activity_date":"2010-05-04 04:18:23.71 UTC","last_edit_date":"2010-05-04 01:29:07.57 UTC","last_editor_display_name":"","last_editor_user_id":"65611","owner_display_name":"","owner_user_id":"331940","post_type_id":"1","score":"1","tags":"c#|generics|nullable","view_count":"1619"} {"id":"20212173","title":"Implementing PHP Activerecord in self made MVC Framework","body":"\u003cp\u003eI'm trying to implement a PHPActiveRecord for my own tiny PHP MVC Framework. But it only work for several table, and won't work for other table. 
\nOK, my MVC structure like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eindex.php\n - assets\n - config/\n - core/\n - Bootstrap.php\n - Controller.php\n - Model.php \n - View.php\n - etc\n - libs/\n - php-activerecord/\n - ActiveRecord.php\n - others\n - app/\n - activerecords/\n - controllers/\n - models/\n - views/\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow i implement Activerecord is like this:\u003c/p\u003e\n\n\u003cp\u003eModel.php\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass Model\n{\n function __construct()\n {\n require_once 'libs/php-activerecord/ActiveRecord.php';\n\n ActiveRecord\\Config::initialize(function($cfg)\n { \n $cfg-\u0026gt;set_model_directory('app/activerecords');\n $cfg-\u0026gt;set_connections(array(\n 'development' =\u0026gt; 'mysql://root:@localhost/db_name'));\n });\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eFor Example, I have 2 table, cities and laboratories. \nThe problem is when retrieving data from cities, it works well, but when retrieving data from laboratories it got an error : \"call to undefined method Laboratory::find\"\u003c/p\u003e\n\n\u003cp\u003eHere is the codes:\nTable Cities\u003c/p\u003e\n\n\u003cp\u003eLocation : activerecords/City.php\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass City extends ActiveRecord\\Model\n{\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTable Laboratories\u003c/p\u003e\n\n\u003cp\u003eLocation : activerecords/Laboratory.php\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e class Laboratory extends ActiveRecord\\Model\n {\n\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIndex Controller\u003c/p\u003e\n\n\u003cp\u003eLocation : controllers/indexController.php\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass Index extends Controller\n{\n function __construct()\n {\n parent::__construct();\n Session::init();\n }\n\n function index()\n {\n $this-\u0026gt;view-\u0026gt;cityLists = 
$this-\u0026gt;model-\u0026gt;getCityLists();\n $this-\u0026gt;view-\u0026gt;laboratoryLists = $this-\u0026gt;model-\u0026gt;getLaboratoryLists();\n $this-\u0026gt;view-\u0026gt;render('home/index'); \n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eModel\u003c/p\u003e\n\n\u003cp\u003eLocation : models/indexModel.php\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass indexModel extends Model\n{\n function __construct()\n {\n parent::__construct();\n }\n\n function getLaboratoryLists()\n {\n return Laboratory::find('all');\n }\n\n function getCityLists()\n {\n return City::find('all');\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eViews\u003c/p\u003e\n\n\u003cp\u003eLocation : views/home/index.php\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eforeach($this-\u0026gt;cityLists as $key){\n echo $key-\u0026gt;id.' - '.$key-\u0026gt;name;\n}\n\nforeach($this-\u0026gt;laboratoryLists as $key){\n echo $key-\u0026gt;id.' - '.$key-\u0026gt;name;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIn the browser, the list of Cities is can be loaded, but it can load the data from table laboratories.\u003cbr\u003e\nThe browser says: \"call to undefined method Laboratory::find\"\u003c/p\u003e\n\n\u003cp\u003eCan someone tell me whats wrong with my codes? is this because of the conventions of PHPActiveRecord or other?\u003c/p\u003e\n\n\u003cp\u003eThanks Before... 
and sory for my poor english..\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2013-11-26 08:47:26.057 UTC","favorite_count":"1","last_activity_date":"2013-11-26 09:06:11.847 UTC","last_edit_date":"2013-11-26 09:06:11.847 UTC","last_editor_display_name":"","last_editor_user_id":"727208","owner_display_name":"","owner_user_id":"1598501","post_type_id":"1","score":"0","tags":"php|phpactiverecord","view_count":"533"} {"id":"33200148","title":"Struts 1.2.9 - Questions around custom internationalization","body":"\u003cp\u003eWe have a legacy application that uses Struts 1.2.9. The app is currently internationalized the standard way - \u003ccode\u003e.properties\u003c/code\u003e files for all UI labels, errors, messages, etc; \u003ccode\u003e\u0026lt;message-resouces\u0026gt;\u003c/code\u003e definition for each .properties file in \u003ccode\u003estruts-config.xml\u003c/code\u003e using default \u003ccode\u003eFactory\u003c/code\u003e \u0026amp; \u003ccode\u003eMessageResources\u003c/code\u003e definitions; \u003ccode\u003e\u0026lt;bean:message\u0026gt;\u003c/code\u003e usage in all JSPs. This has worked great till now, but for the fact that the application itself a framework for services used by a few hundred (yes 100's!) other applications internally.\u003c/p\u003e\n\n\u003cp\u003eWe have a requirement to extend the i18n functionality as follows:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003eDefine a custom directory for \u003ccode\u003e.properties\u003c/code\u003e files - so this would be outside the scope of the classpath; basically not inside the \u003ccode\u003e.war\u003c/code\u003e package. 
The idea is to support just message string changes w/o really having to redeploy the entire application.\u003c/li\u003e\n\u003cli\u003eThis custom directory will also contain per supported applications messages - this could be just a subset of the existing ones or the entire set of resources tailored specifically to that application.\u003c/li\u003e\n\u003cli\u003eCustom way of supporting per request basis \u003ccode\u003eLocale\u003c/code\u003e setting - barring all other considerations (default stack, classpath/package lookups, etc.) this is analogous to the way \u003ccode\u003eI18nInterceptor\u003c/code\u003e works in Struts2 with the \u003ccode\u003erequestOnlyParameterName\u003c/code\u003e attribute set to \u003ccode\u003etrue\u003c/code\u003e.\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eYes, I do understand that a few 100 bundles loaded at the same time will be memory intensive, but that is acceptable in our case.\u003c/p\u003e\n\n\u003cp\u003eAny help is appreciated - be it direction, sample code, etc.\u003c/p\u003e\n\n\u003cp\u003eNote: I completely agree that moving onto a newer UI platform is probably the best solution. But we can't.\u003c/p\u003e\n\n\u003cp\u003eTIA.\u003c/p\u003e","answer_count":"1","comment_count":"1","creation_date":"2015-10-18 16:12:05.557 UTC","last_activity_date":"2015-11-10 20:53:43.09 UTC","last_edit_date":"2015-10-18 19:07:39.85 UTC","last_editor_display_name":"","last_editor_user_id":"912947","owner_display_name":"","owner_user_id":"912947","post_type_id":"1","score":"6","tags":"java|internationalization|customization|struts-1","view_count":"114"} @@ -4337,7 +4337,7 @@ {"id":"5011750","title":"Detect if USTREAM is broadcasting (on-air)","body":"\u003cp\u003eWhat is the best way to detect if a broadcast is live coming from USTREAM? I've got a church site that uses USTREAM and they would like to have the embeded player show up when the broadcast is live and disappear when the broadcast is off-air. 
\u003c/p\u003e\n\n\u003cp\u003eIs this possible with ColdFusion or some kind of javascript/ajax?\u003c/p\u003e","accepted_answer_id":"5011769","answer_count":"2","comment_count":"0","creation_date":"2011-02-16 01:57:17.493 UTC","last_activity_date":"2012-09-17 23:02:20.72 UTC","last_edit_date":"2011-02-27 21:01:11.457 UTC","last_editor_display_name":"","last_editor_user_id":"49246","owner_display_name":"","owner_user_id":"370415","post_type_id":"1","score":"1","tags":"ajax|coldfusion|video-streaming|embedded-video","view_count":"1583"} {"id":"35918627","title":"SQL - SELECT DISTINCT user, when they match COLUMN1 VALUEA but do NOT have COLUMN1 VALUEB","body":"\u003cp\u003eI have the following table; \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e user | column2 |\n--------+------------+\n tom | Created |\n test | Created |\n fred | Removed |\n tom | Removed |\n fred | Created |\n holly | Created |\n test | Modified |\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI am looking to query the table to return users who have CREATED and no users who also have a REMOVED value. Also, users with MODIFIED can appear but not the row with their MODIFIED value. 
\u003c/p\u003e\n\n\u003cp\u003eResult should be:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e user | column2 |\n-------+------------+\n test | Created |\n Holly | Created |\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI have only been using WHERE clauses, is there another way to achieve this?\u003c/p\u003e\n\n\u003cp\u003eReally appreciate any advice given\u003c/p\u003e","accepted_answer_id":"35918679","answer_count":"3","comment_count":"0","creation_date":"2016-03-10 14:02:07.707 UTC","last_activity_date":"2016-03-10 15:02:55.217 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5409483","post_type_id":"1","score":"-1","tags":"sql|sql-server","view_count":"39"} {"id":"16410676","title":"what's the difference between javascript PACKED and PACKER","body":"\u003cp\u003eI found two difference way to pack javascript file, there's packed and packer.\nthe encode js file after packed looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eeval(function(p,a,c,k,e,d){..});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eeval(function(p,a,c,k,e,r){...});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhat is the difference of two kinds?\nI know the way to pack in second type (packer), but don't know how to pack in first way (packed)\nanyone can help me?\u003c/p\u003e","answer_count":"0","comment_count":"6","creation_date":"2013-05-07 03:21:03.017 UTC","last_activity_date":"2013-05-07 03:21:03.017 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2231331","post_type_id":"1","score":"0","tags":"javascript|compression|decompression|packed","view_count":"702"} -{"id":"38838604","title":"Ordering Searchkick's search result by act_as_votable vote score","body":"\u003cp\u003eI'm using act as votable for voting on my rails app and searchkick for search. But i want searchkick's search results to be order by vote score. I really need guys, please anybody? 
Here is what I have at the moment. And it's not working\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edef search\n\n if params[:search].present?\n @peegins = Peegin.search(params[:search]).order(:cached_votes_score =\u0026gt; :desc)\n\n else\n redirect_to peegins_path\nend\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"38855403","answer_count":"1","comment_count":"0","creation_date":"2016-08-08 21:12:17.817 UTC","last_activity_date":"2016-08-09 15:54:52.83 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3871657","post_type_id":"1","score":"0","tags":"ruby-on-rails|ruby|elasticsearch|searchkick|vote","view_count":"51"} +{"id":"38838604","title":"Ordering Searchkick's search result by act_as_votable vote score","body":"\u003cp\u003eI'm using act as votable for voting on my rails app and searchkick for search. But i want searchkick's search results to be order by vote score. I really need guys, please anybody? Here is what I have at the moment. And it's not working\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edef search\n\n if params[:search].present?\n @peegins = Peegin.search(params[:search]).order(:cached_votes_score =\u0026gt; :desc)\n\n else\n redirect_to peegins_path\nend\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"38855403","answer_count":"1","comment_count":"0","creation_date":"2016-08-08 21:12:17.817 UTC","last_activity_date":"2016-08-09 15:54:52.83 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3871657","post_type_id":"1","score":"0","tags":"ruby-on-rails|ruby|opensearch|searchkick|vote","view_count":"51"} {"id":"27368459","title":"using a dynamic vector as an index value in for loop in Matlab","body":"\u003cpre\u003e\u003ccode\u003ea=1:5\nfor k=a\n if k\u0026lt;3\n a=[a k+5];\n end\ndisp(k)\nend\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I run this code, I get these 
results:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e1\n2\n3\n4\n5\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ek uses only the initial vector when it enters to the loop. I want it to update the values of a and take the new values of a too.\nThus, my question is how do I get this result:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e1\n2\n3\n4\n5\n6\n7\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"27368531","answer_count":"2","comment_count":"1","creation_date":"2014-12-08 22:49:36.037 UTC","last_activity_date":"2014-12-09 06:48:34.147 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4339240","post_type_id":"1","score":"2","tags":"matlab|loops|for-loop|dynamic|vector","view_count":"47"} {"id":"30132874","title":"Can't connect to host, but the URL works fine in actual web browser","body":"\u003cp\u003eI'm behind a VPN. And I think whoever administers it must have done some weird change lately because suddenly my script doesn't work. \u003c/p\u003e\n\n\u003cp\u003eIt's not terribly important to know what the below is doing, basically logging into SFDC so that I can later download a CSV.. \u003c/p\u003e\n\n\u003cp\u003eThe point is that if I were to simply plop in the url string (\u003ca href=\"https://login.salesforce.com/?un=username@domain.com\u0026amp;pw=password\" rel=\"nofollow\"\u003ehttps://login.salesforce.com/?un=username@domain.com\u0026amp;pw=password\u003c/a\u003e) into my web browser, it will work no problem. 
So why, with the EXACT same URL, is R unable to connect to host?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elibrary(RCurl)\n\nagent=\"Firefox/23.0\" \n\noptions(RCurlOptions = list(cainfo = system.file(\"CurlSSL\", \"cacert.pem\", package = \"RCurl\")))\ncurl = getCurlHandle()\n\ncurlSetOpt(\n cookiejar = 'cookies.txt' ,\n useragent = agent,\n followlocation = TRUE ,\n autoreferer = TRUE ,\n curl = curl\n)\nun=\"username@domain.com\"\npw=\"password\"\n\nhtml = postForm(paste(\"https://login.salesforce.com/?un=\", un, \"\u0026amp;pw=\", pw, sep=\"\"), curl=curl)\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"30354267","answer_count":"1","comment_count":"5","creation_date":"2015-05-08 21:08:32.91 UTC","last_activity_date":"2015-05-20 15:36:39.683 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3562196","post_type_id":"1","score":"0","tags":"r","view_count":"629"} {"id":"25824123","title":"Robust comparison of positive/negative numbers value by a threshold value","body":"\u003cp\u003eI would like to calculate whether a variable \u003ccode\u003eaverage\u003c/code\u003e differs from another variable \u003ccode\u003etrackingAmount\u003c/code\u003e by a certain \u003ccode\u003ethreshold\u003c/code\u003e either positively(+) or negatively (-).\u003c/p\u003e\n\n\u003ch2\u003eThese are the constraints:\u003c/h2\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cp\u003eIf the difference (+/-) between \u003ccode\u003eaverage\u003c/code\u003e and \u003ccode\u003etrackingAmount\u003c/code\u003e exceeds the\n\u003ccode\u003ethreshold\u003c/code\u003e value then I would like to trigger a function\n\u003ccode\u003ecalcMultiTrack()\u003c/code\u003e\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eThreshold value in the example is called \u003ccode\u003etrackTolerance\u003c/code\u003e\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003e\u003ccode\u003eaverage\u003c/code\u003e can be positive or negative, same goes for 
\u003ccode\u003etrackingAmount\u003c/code\u003e\u003c/p\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eWhat is the most robust, \u003cem\u003e(maybe \u003cstrong\u003eelegant\u003c/strong\u003e is a better word here)\u003c/em\u003e, way\n to handle such cases?\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003ch2\u003eThis is what I do so far.\u003c/h2\u003e\n\n\u003cpre\u003e\u003ccode\u003e average = average / (selItemsDimArray.length - 1); \n var trackingAmount = 3 \n var trackTolerance = 0.2 \n\n //If number is positive \n if (average \u0026gt;= 0) {\n if (average \u0026lt; (trackingAmount - trackTolerance) || average \u0026gt; (trackingAmount + trackTolerance)) {\n calcMultiTrack(); //This is the function I want to call if the numbers are not the same(threshold value applies)\n console.log(\"Positive average that differs with trackingAmount by more than +- tolerance\");\n }\n }\n //Else number is negative\n else {\n if (average \u0026lt; (-(trackingAmount - trackTolerance)) || average \u0026gt; (-(trackingAmount + trackTolerance))) {\n calcMultiTrack();\n console.log(\"Negative average that differs with trackingAmount by more than +- tolerance\");\n }\n }\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"25824142","answer_count":"2","comment_count":"0","creation_date":"2014-09-13 14:21:20.7 UTC","favorite_count":"1","last_activity_date":"2014-09-15 06:09:22.687 UTC","last_edit_date":"2014-09-15 06:09:22.687 UTC","last_editor_display_name":"","last_editor_user_id":"1814486","owner_display_name":"","owner_user_id":"1814486","post_type_id":"1","score":"2","tags":"javascript|coding-style","view_count":"283"} @@ -4348,7 +4348,7 @@ {"id":"26237216","title":"PostgreSql: cannot use aggregate function in UPDATE","body":"\u003cp\u003eI have an Oracle query that I ported to PostgreSql:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eUPDATE \"SPD_PG\".\"TT_SPLDR_11A2F324_29\" \nSET \"SECT_ORDER\" = MAX(\"SECTIONS\".\"SECT_ORDER\")+1 
FROM \"SPD_PG\".\"SECTIONS\"\nINNER JOIN \"SPD_PG\".\"META_SECTIONS\" ON (\"SECTIONS\".\"META_SECT_ID\"=\"META_SECTIONS\".\"META_SECT_ID\")\nWHERE (\"META_SECTIONS\".\"META_SECT_ORDER\"=\"TT_SPLDR_11A2F324_29\".\"META_SECT_ORDER\"-1)\nAND (\"SECTIONS\".\"DOC_ID\"=\"TT_SPLDR_11A2F324_29\".\"DOC_ID\")\nAND (\"TT_SPLDR_11A2F324_29\".\"META_SECT_ORDER\"\u0026gt;0)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis give me: \u003ccode\u003eERROR: cannot use aggregate function in UPDATE\u003c/code\u003e, seems PostgreSql doesn't support \u003ccode\u003eMAX\u003c/code\u003e in Update statements.\u003c/p\u003e\n\n\u003cp\u003eHowever if I rewrite the query as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eUPDATE \"SPD_PG\".\"TT_SPLDR_11A2F324_29\" \nSET \"SECT_ORDER\" = \"MAX_VALUE\" FROM (\n SELECT MAX(\"SECTIONS\".\"SECT_ORDER\")+1 AS \"MAX_VALUE\" FROM \"SPD_PG\".\"SECTIONS\"\n INNER JOIN \"SPD_PG\".\"META_SECTIONS\" ON (\"SECTIONS\".\"META_SECT_ID\"=\"META_SECTIONS\".\"META_SECT_ID\")\n WHERE (\"META_SECTIONS\".\"META_SECT_ORDER\"=\"TT_SPLDR_11A2F324_29\".\"META_SECT_ORDER\"-1)\n AND (\"SECTIONS\".\"DOC_ID\"=\"TT_SPLDR_11A2F324_29\".\"DOC_ID\")\n AND (\"TT_SPLDR_11A2F324_29\".\"META_SECT_ORDER\"\u0026gt;0)\n) \"TBL_ALIAS\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eit says \u003ccode\u003eERROR: subquery in FROM cannot refer to other relations of same query level\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eSo I can't figure out how to write this query.\u003c/p\u003e","accepted_answer_id":"26237336","answer_count":"1","comment_count":"2","creation_date":"2014-10-07 13:30:08.393 UTC","last_activity_date":"2014-10-07 13:36:44.49 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1409881","post_type_id":"1","score":"1","tags":"postgresql|sql-update|aggregate-functions","view_count":"2489"} {"id":"24980357","title":"Kendo Grid edit popup complex model submitting","body":"\u003cp\u003eI’m trying to use complex models with 
Kendo grid edit popup. When submitting ALResults object properties are always null. It works fine when I’m not using Kendo. Is there a problem with kendo complex model submitting?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class InitialApplicantLevel2Model\n {\n public InitialApplicantLevel2Model()\n {\n\n alResultsModel = new ALResults();\n }\n\n public int InitialApplicantLevel2ID { get; set; }\n public string ApplicantName { get; set; }\n public string ContactNumber { get; set; }\n public string School { get; set; }\n\n [Required(ErrorMessage=\"Ref No. required.\")]\n public int? EnquiryID { get; set; }\n\n\n\n public ALResults alResultsModel { get; set; }\n\n\n\n\n }\n\npublic class ALResults\n {\n public int ResultsID { get; set; }\n public int InitialApplicantLevel2ID { get; set; }\n public string Stream { get; set; }\n public string Grading { get; set; }\n public string IndexNo { get; set; }\n public int? Year { get; set; }\n public int? Attempt { get; set; }\n public double? 
ZScore { get; set; }\n public string Medium { get; set; }\n }\n\n\n\n@model SIMS.Models.StudentIntake.InitialApplicantLevel2Model \n\u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;Year: \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;@Html.TextBoxFor(o=\u0026gt;o.alResultsModel.Year)\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;Index No: \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;@Html.TextBoxFor(o=\u0026gt;o.alResultsModel.IndexNo)\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;Medium: \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;@Html.TextBoxFor(o=\u0026gt;o.alResultsModel.Medium)\u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n \u0026lt;tr\u0026gt;\n \u0026lt;td\u0026gt;Stream: \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;@Html.TextBoxFor(o=\u0026gt;o.alResultsModel.Stream)\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;Attempt: \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;@Html.TextBoxFor(o=\u0026gt;o.alResultsModel.Attempt)\u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;Zscore: \u0026lt;/td\u0026gt;\n \u0026lt;td\u0026gt;\n @Html.TextBoxFor(o=\u0026gt;o.alResultsModel.ZScore)\n\n \u0026lt;/td\u0026gt;\n \u0026lt;/tr\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/3T0e8.jpg\" alt=\"enter image description here\"\u003e\u003c/p\u003e","accepted_answer_id":"24982531","answer_count":"1","comment_count":"0","creation_date":"2014-07-27 11:14:38.987 UTC","last_activity_date":"2014-07-27 15:40:13.78 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"367562","post_type_id":"1","score":"0","tags":"asp.net-mvc|kendo-ui|kendo-grid","view_count":"391"} {"id":"35704209","title":"access wcf self service host using js","body":"\u003cp\u003eI created a wcf self service host \nI can access it , and see it's wsdl\nbut when trying to add the /js extension to the path I get 405 error.\nI cannot understand why, while doing the same with a asp.net web applicaton it worked ok.\u003c/p\u003e\n\n\u003cp\u003ewcf class 
:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003enamespace A\n{\n\n [ServiceBehavior(IncludeExceptionDetailInFaults=true)]\n [AspNetCompatibilityRequirements(RequirementsMode = AspNetCompatibilityRequirementsMode.Allowed)]\n public class Hello : IHello\n {\n\n public string SayHi()\n {\n return \"Hiush !\";\n }\n\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewcf interface:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003enamespace A\n{\n\n [ServiceContract]\n public interface IHello\n\n {\n [OperationContract]\n string SayHi();\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewcf svc file:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;%@ ServiceHost Language=\"C#\" Debug=\"true\" Service=\"A.Hello\" %\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe self service host:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003enamespace SelfServiceHost\n{\n\n class Program\n {\n\n static void Main(string[] args)\n {\n using (ServiceHost helloHost = new ServiceHost(typeof(A.Hello)))\n {\n helloHost.Open();\n Console.WriteLine(\"HelloHost started @ \" + DateTime.Now);\n Console.ReadKey();\n } \n }\n\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eself service host app.config:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;?xml version=\"1.0\"?\u0026gt;\n\u0026lt;configuration\u0026gt;\n \u0026lt;system.web\u0026gt;\n \u0026lt;compilation debug=\"true\"/\u0026gt;\n \u0026lt;/system.web\u0026gt;\n\n \u0026lt;!-- When deploying the service library project, the content of the config file must be added to the host's \n app.config file. System.Configuration does not support config files for libraries. 
--\u0026gt;\n \u0026lt;system.serviceModel\u0026gt;\n \u0026lt;bindings\u0026gt;\n \u0026lt;basicHttpBinding\u0026gt;\n \u0026lt;binding name=\"\" closeTimeout=\"00:01:00\"\n openTimeout=\"00:01:00\" receiveTimeout=\"00:10:00\" sendTimeout=\"00:01:00\"\n allowCookies=\"false\" bypassProxyOnLocal=\"true\" hostNameComparisonMode=\"StrongWildcard\"\n maxBufferSize=\"524288\" maxBufferPoolSize=\"524288\" maxReceivedMessageSize=\"524288\"\n messageEncoding=\"Text\" textEncoding=\"utf-8\" transferMode=\"Buffered\"\n useDefaultWebProxy=\"true\"\u0026gt;\n \u0026lt;readerQuotas maxDepth=\"32\" maxStringContentLength=\"524288\" maxArrayLength=\"16384\"\n maxBytesPerRead=\"4096\" maxNameTableCharCount=\"16384\" /\u0026gt;\n \u0026lt;!--\u0026lt;security mode=\"TransportCredentialOnly\"\u0026gt;\n \u0026lt;transport clientCredentialType=\"Windows\" proxyCredentialType=\"None\"\n realm=\"\" /\u0026gt;\n \u0026lt;message clientCredentialType=\"UserName\" algorithmSuite=\"Default\" /\u0026gt;\n \u0026lt;/security\u0026gt;--\u0026gt;\n \u0026lt;/binding\u0026gt;\n \u0026lt;/basicHttpBinding\u0026gt;\n \u0026lt;/bindings\u0026gt;\n \u0026lt;services\u0026gt;\n \u0026lt;service name=\"A.Hello\"\u0026gt;\n \u0026lt;endpoint address=\"PINCalc\" behaviorConfiguration=\"AAA\" \n binding=\"webHttpBinding\" contract=\"A.IHello\"\u0026gt;\n \u0026lt;!--\u0026lt;identity\u0026gt;\n \u0026lt;dns value=\"localhost\"/\u0026gt;\n \u0026lt;/identity\u0026gt;--\u0026gt;\n \u0026lt;/endpoint\u0026gt;\n \u0026lt;host\u0026gt;\n \u0026lt;baseAddresses\u0026gt;\n \u0026lt;add baseAddress=\"http://localhost:3020/Hello.svc\"/\u0026gt;\n \u0026lt;/baseAddresses\u0026gt;\n \u0026lt;/host\u0026gt;\n \u0026lt;!-- Service Endpoints --\u0026gt;\n \u0026lt;!-- Unless fully qualified, address is relative to base address supplied above --\u0026gt;\n \u0026lt;/service\u0026gt;\n \u0026lt;/services\u0026gt;\n \u0026lt;behaviors\u0026gt;\n \u0026lt;serviceBehaviors\u0026gt;\n \u0026lt;behavior\u0026gt;\n 
\u0026lt;serviceMetadata httpGetEnabled=\"True\"/\u0026gt;\n \u0026lt;serviceDebug includeExceptionDetailInFaults=\"True\"/\u0026gt;\n \u0026lt;/behavior\u0026gt;\n \u0026lt;/serviceBehaviors\u0026gt;\n \u0026lt;endpointBehaviors\u0026gt;\n \u0026lt;behavior name=\"AAA\"\u0026gt;\n \u0026lt;enableWebScript/\u0026gt;\n \u0026lt;/behavior\u0026gt;\n \u0026lt;/endpointBehaviors\u0026gt;\n \u0026lt;/behaviors\u0026gt;\n \u0026lt;serviceHostingEnvironment aspNetCompatibilityEnabled=\"true\" multipleSiteBindingsEnabled=\"true\"/\u0026gt;\n \u0026lt;/system.serviceModel\u0026gt; \n\u0026lt;startup\u0026gt;\u0026lt;supportedRuntime version=\"v4.0\" sku=\".NETFramework,Version=v4.0\"/\u0026gt;\u0026lt;/startup\u0026gt;\u0026lt;/configuration\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"4","creation_date":"2016-02-29 16:12:12.517 UTC","last_activity_date":"2016-02-29 16:19:41.65 UTC","last_edit_date":"2016-02-29 16:19:41.65 UTC","last_editor_display_name":"","last_editor_user_id":"2399775","owner_display_name":"","owner_user_id":"5967729","post_type_id":"1","score":"0","tags":"javascript|c#|asp.net|web-services|wcf","view_count":"52"} -{"id":"7642356","title":"How can i get a hash into an array, state and acronym?","body":"\u003cpre\u003e\u003ccode\u003eArray = [{:acronym =\u0026gt; \"AC\", :fullname =\u0026gt; \"Acre\"}, {:acronym =\u0026gt; \"AL\", :fullname =\u0026gt; \"Alagoas\"}, {:acronym =\u0026gt; \"AP\", :fullname =\u0026gt; \"Amapá\"}, {:acronym =\u0026gt; \"AM\", :fullname =\u0026gt; \"Amazonas\"}, {:acronym =\u0026gt; \"BA\", :fullname =\u0026gt; \"Bahia\"}, {:acronym =\u0026gt; \"CE\", :fullname =\u0026gt; \"Ceará\"}, {:acronym =\u0026gt; \"DF\", :fullname =\u0026gt; \"Distrito Federal\"}, {:acronym =\u0026gt; \"ES\", :fullname =\u0026gt; \"Espírito Santo\"}, {:acronym =\u0026gt; \"GO\", :fullname =\u0026gt; \"Goiás\"}, {:acronym =\u0026gt; \"MA\", :fullname =\u0026gt; \"Maranhão\"}, {:acronym =\u0026gt; \"MT\", :fullname 
=\u0026gt; \"Mato Grosso\"}, {:acronym =\u0026gt; \"MS\", :fullname =\u0026gt; \"Mato Grosso do Sul\"}, {:acronym =\u0026gt; \"MG\", :fullname =\u0026gt; \"Minas Gerais\"}, {:acronym =\u0026gt; \"PA\", :fullname =\u0026gt; \"Pará\"}, {:acronym =\u0026gt; \"PB\", :fullname =\u0026gt; \"Paraíba\"}, {:acronym =\u0026gt; \"PR\", :fullname =\u0026gt; \"Paraná\"}, {:acronym =\u0026gt; \"PE\", :fullname =\u0026gt; \"Pernambuco\"}, {:acronym =\u0026gt; \"PI\", :fullname =\u0026gt; \"Piauí\"}, {:acronym =\u0026gt; \"RR\", :fullname =\u0026gt; \"Roraima\"}, {:acronym =\u0026gt; \"RO\", :fullname =\u0026gt; \"Rondônia\"}, {:acronym =\u0026gt; \"RJ\", :fullname =\u0026gt; \"Rio de Janeiro\"}, {:acronym =\u0026gt; \"RN\", :fullname =\u0026gt; \"Rio Grande do Norte\"}, {:acronym =\u0026gt; \"RS\", :fullname =\u0026gt; \"Rio Grande do Sul\"}, {:acronym =\u0026gt; \"SC\", :fullname =\u0026gt; \"Santa Catarina\"}, {:acronym =\u0026gt; \"SP\", :fullname =\u0026gt; \"São Paulo\"}, {:acronym =\u0026gt; \"SE\", :fullname =\u0026gt; \"Sergipe\"}, {:acronym =\u0026gt; \"TO\", :fullname =\u0026gt; \"Tocantins\"}]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow can I compare a variable with \u003ccode\u003e:acronym\u003c/code\u003e and return the \u003ccode\u003e:fullname\u003c/code\u003e in other variable?\nI'm trying to do this using a Rails helper.\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2011-10-04 01:13:10.923 UTC","last_activity_date":"2011-10-04 01:41:31.417 UTC","last_edit_date":"2011-10-04 01:28:58.533 UTC","last_editor_display_name":"","last_editor_user_id":"479863","owner_display_name":"","owner_user_id":"977653","post_type_id":"1","score":"0","tags":"ruby|ruby-on-rails-3|state|helper|acronym","view_count":"102"} +{"id":"7642356","title":"How can i get a hash into an array, state and acronym?","body":"\u003cpre\u003e\u003ccode\u003eArray = [{:acronym =\u0026gt; \"AC\", :fullname =\u0026gt; \"Acre\"}, {:acronym =\u0026gt; \"AL\", :fullname 
=\u0026gt; \"Alagoas\"}, {:acronym =\u0026gt; \"AP\", :fullname =\u0026gt; \"Amapá\"}, {:acronym =\u0026gt; \"AM\", :fullname =\u0026gt; \"Amazonas\"}, {:acronym =\u0026gt; \"BA\", :fullname =\u0026gt; \"Bahia\"}, {:acronym =\u0026gt; \"CE\", :fullname =\u0026gt; \"Ceará\"}, {:acronym =\u0026gt; \"DF\", :fullname =\u0026gt; \"Distrito Federal\"}, {:acronym =\u0026gt; \"opensearch\", :fullname =\u0026gt; \"Espírito Santo\"}, {:acronym =\u0026gt; \"GO\", :fullname =\u0026gt; \"Goiás\"}, {:acronym =\u0026gt; \"MA\", :fullname =\u0026gt; \"Maranhão\"}, {:acronym =\u0026gt; \"MT\", :fullname =\u0026gt; \"Mato Grosso\"}, {:acronym =\u0026gt; \"MS\", :fullname =\u0026gt; \"Mato Grosso do Sul\"}, {:acronym =\u0026gt; \"MG\", :fullname =\u0026gt; \"Minas Gerais\"}, {:acronym =\u0026gt; \"PA\", :fullname =\u0026gt; \"Pará\"}, {:acronym =\u0026gt; \"PB\", :fullname =\u0026gt; \"Paraíba\"}, {:acronym =\u0026gt; \"PR\", :fullname =\u0026gt; \"Paraná\"}, {:acronym =\u0026gt; \"PE\", :fullname =\u0026gt; \"Pernambuco\"}, {:acronym =\u0026gt; \"PI\", :fullname =\u0026gt; \"Piauí\"}, {:acronym =\u0026gt; \"RR\", :fullname =\u0026gt; \"Roraima\"}, {:acronym =\u0026gt; \"RO\", :fullname =\u0026gt; \"Rondônia\"}, {:acronym =\u0026gt; \"RJ\", :fullname =\u0026gt; \"Rio de Janeiro\"}, {:acronym =\u0026gt; \"RN\", :fullname =\u0026gt; \"Rio Grande do Norte\"}, {:acronym =\u0026gt; \"RS\", :fullname =\u0026gt; \"Rio Grande do Sul\"}, {:acronym =\u0026gt; \"SC\", :fullname =\u0026gt; \"Santa Catarina\"}, {:acronym =\u0026gt; \"SP\", :fullname =\u0026gt; \"São Paulo\"}, {:acronym =\u0026gt; \"SE\", :fullname =\u0026gt; \"Sergipe\"}, {:acronym =\u0026gt; \"TO\", :fullname =\u0026gt; \"Tocantins\"}]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow can I compare a variable with \u003ccode\u003e:acronym\u003c/code\u003e and return the \u003ccode\u003e:fullname\u003c/code\u003e in other variable?\nI'm trying to do this using a Rails 
helper.\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2011-10-04 01:13:10.923 UTC","last_activity_date":"2011-10-04 01:41:31.417 UTC","last_edit_date":"2011-10-04 01:28:58.533 UTC","last_editor_display_name":"","last_editor_user_id":"479863","owner_display_name":"","owner_user_id":"977653","post_type_id":"1","score":"0","tags":"ruby|ruby-on-rails-3|state|helper|acronym","view_count":"102"} {"id":"7023664","title":"How to get all folders in a SPList, then checking permission \"Contribute\" for current user","body":"\u003cp\u003eI have a sharepoint list like that:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eList\n---------Folder 1\n-----------------Item 1\n-----------------Item 2\n---------Folder 2\n-----------------Item 1\n-----------------Item 2\n---------Folder 3\n-----------------Item 1\n-----------------Item 2\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003cp\u003eHow can I get all Folders in \u003ccode\u003eList\u003c/code\u003e?\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eAfter that checking if current user has \u003ccode\u003eContribute\u003c/code\u003e permission on \u003ccode\u003eFolder 1\u003c/code\u003e, \u003ccode\u003eFolder 2\u003c/code\u003e, \u003ccode\u003eFolder 3\u003c/code\u003e?\u003c/p\u003e\u003c/li\u003e\n\u003c/ol\u003e","answer_count":"3","comment_count":"0","creation_date":"2011-08-11 09:31:48.2 UTC","favorite_count":"1","last_activity_date":"2017-04-07 16:46:21.353 UTC","last_edit_date":"2017-04-07 16:37:11.307 UTC","last_editor_display_name":"","last_editor_user_id":"285795","owner_display_name":"","owner_user_id":"889597","post_type_id":"1","score":"6","tags":"sharepoint|caml|spquery","view_count":"8568"} {"id":"34289399","title":"Display dates between two dates in asp.net","body":"\u003cp\u003eI have two calendars in my aspx and I want to display records between selected dates of these calendars. 
My 'TeklifTarih' database attribute is a date type attribute.\nHere is my aspx:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;asp:Calendar ID=\"Calendar1\" runat=\"server\"\u0026gt;\u0026lt;/asp:Calendar\u0026gt;\u0026lt;br /\u0026gt;\n\u0026lt;asp:Calendar ID=\"Calendar2\" runat=\"server\"\u0026gt;\u0026lt;/asp:Calendar\u0026gt;\u0026lt;br/\u0026gt;\n\u0026lt;asp:Button ID=\"btnClendar\" runat=\"server\" Text=\"İstatistikleri Filtrele\" OnClick=\"btnClendar_Click\"/\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd my onclick method:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprotected void btnClendar_Click(object sender, EventArgs e)\n {\n string baslangicTarihi = Calendar1.SelectedDate.ToString();\n string bitisTarihi = Calendar2.SelectedDate.ToString();\n EntityDataSourcePersonel.CommandText =\n \"SELECT COUNT(TeklifTable.TeklifHazirlayan) AS Basari, EmployeeTable.Name, EmployeeTable.Surname, SUM(TeklifTable.TeklifTutar) AS ToplamSatis FROM EmployeeTable JOIN TeklifTable ON TeklifTable.TeklifHazirlayan = EmployeeTable.EmployeeId WHERE TeklifTable.TeklifTarih \u0026gt;= \" + baslangicTarihi + \" AND TeklifTable.TeklifTarih \u0026lt;= \" + bitisTarihi + \" GROUP BY EmployeeTable.Name,EmployeeTable.Surname\";\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI need to display datas with this commandtext and it works when I remove where command of query but I need to filter with these two dates.\u003c/p\u003e","answer_count":"3","comment_count":"4","creation_date":"2015-12-15 12:31:37.747 UTC","last_activity_date":"2015-12-16 02:25:42.16 UTC","last_edit_date":"2015-12-15 13:01:21.017 UTC","last_editor_display_name":"","last_editor_user_id":"3706016","owner_display_name":"","owner_user_id":"3173750","post_type_id":"1","score":"0","tags":"c#|sql|asp.net|sql-server|date","view_count":"251"} {"id":"41760371","title":"Ajax alert on validation error","body":"\u003cp\u003eI have made this ajax 
call:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$.ajax({\n type: myMethod,\n url: myRoute,\n headers: { 'X-CSRF-TOKEN': \"{{csrf_token()}}\" },\n data: form.serializeArray(),\n dataType: 'json',\n success: function(data){\n console.log('validated!');\n },\n error: function(data) {\n var errors = data.responseJSON;\n for (error in errors) {\n alert(error);\n }\n console.log(errors);\n }\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI get this response in the console:\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://i.stack.imgur.com/u8qo9.png\" rel=\"nofollow noreferrer\"\u003e\u003cimg src=\"https://i.stack.imgur.com/u8qo9.png\" alt=\"enter image description here\"\u003e\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eAnd my alerts are those field names:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eevent_end_date\nevent_start_date\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd what i want is to print those messages:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eEndzeitpunkt muss ausgefüllt sein.\nStartzeitpunkt muss ausgefüllt sein.\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow to get them in alert?\u003c/p\u003e","accepted_answer_id":"41760402","answer_count":"1","comment_count":"0","creation_date":"2017-01-20 09:40:08.483 UTC","last_activity_date":"2017-01-20 09:43:57.287 UTC","last_edit_date":"2017-01-20 09:43:57.287 UTC","last_editor_display_name":"","last_editor_user_id":"519413","owner_display_name":"","owner_user_id":"2502731","post_type_id":"1","score":"2","tags":"jquery|ajax","view_count":"24"} @@ -4621,7 +4621,7 @@ {"id":"30754293","title":"Getting the messages back to the same command line from where the MFC application was launched","body":"\u003cp\u003eI am executing an \u003cstrong\u003eMFC Application\u003c/strong\u003e from command line which takes four command line arguments.One of the argument is the directory path.\nIf the path is wrong then I want to show a Message \"Bad Path\" on the same 
\u003cstrong\u003ecommand line\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eNote : For showing I don't want to take a new command line .\u003c/strong\u003e \u003c/p\u003e","accepted_answer_id":"30769051","answer_count":"1","comment_count":"2","creation_date":"2015-06-10 10:57:14.113 UTC","last_activity_date":"2015-06-10 23:29:30.78 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4948953","post_type_id":"1","score":"0","tags":"c++|mfc","view_count":"76"} {"id":"7582284","title":"Apply TreeView bindings to non-expanded notes","body":"\u003cp\u003eI want to use a hierarchical TreeView which I will populate programmatically.\u003c/p\u003e\n\n\u003cp\u003eMy XAML file is as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;Window.Resources\u0026gt;\n \u0026lt;HierarchicalDataTemplate \n DataType=\"{x:Type local:MyTreeViewItem}\" \n ItemsSource=\"{Binding Path=Children}\"\u0026gt;\n \u0026lt;TextBlock Text=\"{Binding Path=Header}\"/\u0026gt;\n \u0026lt;/HierarchicalDataTemplate\u0026gt;\n\u0026lt;/Window.Resources\u0026gt;\n\n\n\u0026lt;Grid\u0026gt;\n \u0026lt;TreeView Margin=\"12,12,422,33\" Name=\"treeView1\" SelectedItemChanged=\"treeView1_SelectedItemChanged\" MouseDoubleClick=\"treeView1_MouseDoubleClick\"\u0026gt;\n \u0026lt;TreeView.ItemContainerStyle\u0026gt;\n \u0026lt;Style TargetType=\"{x:Type TreeViewItem}\"\u0026gt;\n \u0026lt;Setter Property=\"IsSelected\" Value=\"{Binding IsSelected, Mode=TwoWay}\"/\u0026gt;\n \u0026lt;Setter Property=\"IsExpanded\" Value=\"{Binding IsExpanded, Mode=TwoWay}\"/\u0026gt;\n \u0026lt;/Style\u0026gt;\n \u0026lt;/TreeView.ItemContainerStyle\u0026gt;\n \u0026lt;/TreeView\u0026gt;\n\u0026lt;/Grid\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe problem I'm having is that it seems that the bindings are applied only once the item is visible.\u003c/p\u003e\n\n\u003cp\u003eSuppose I populate the TreeView as 
follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprivate ObservableCollection\u0026lt;MyTreeViewItem\u0026gt; m_items;\nprivate MyTreeViewItem m_item1;\nprivate MyTreeViewItem m_item2;\n\npublic MainWindow()\n{\n InitializeComponent();\n m_items = new ObservableCollection\u0026lt;MyTreeViewItem\u0026gt;();\n m_item1 = new MyTreeViewItem(null) {Header = \"Item1\"};\n m_item2 = new MyTreeViewItem(null) {Header = \"Item2\"};\n m_item1.Children.Add(m_item2);\n m_items.Add(m_item1);\n treeView1.ItemsSource = m_items;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI also have a button that selects m_item2:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprivate void button2_Click(object sender, RoutedEventArgs e)\n{\n m_item2.IsSelected = true;\n m_item2.IsExpanded = true;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNow, if I launch the program and the TreeView only shows Item1 (Item2 is hidden because Item1 is not expanded), then clicking the button won't select m_item2. If I expand Item1 (thus making Item2 visible), the button will select m_item2.\u003c/p\u003e\n\n\u003cp\u003eExamining the PropertyChanged event on m_item2, I see that it is set to null initially, and a delegate is registered only once it is visible.\u003c/p\u003e\n\n\u003cp\u003eThis is a problem for me because I want to be able to programmatically select an item, even if its parent has not yet been expanded (e.g. I want to be able to find a node in the tree).\u003c/p\u003e\n\n\u003cp\u003eI suppose I can programmatically expand and collapse all nodes, but it seems there should be a better way. 
Can someone suggest a solution?\u003c/p\u003e","accepted_answer_id":"7582421","answer_count":"1","comment_count":"0","creation_date":"2011-09-28 11:09:08.647 UTC","last_activity_date":"2011-09-28 11:20:18.247 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"447202","post_type_id":"1","score":"1","tags":"wpf|treeview","view_count":"214"} {"id":"12660841","title":"Making my POS : Error on DataGridview","body":"\u003cp\u003eI am trying to make a simple POS system with VB.NET, but since I don't know how to start, i ask for my friends to give me sample source code. I planning to use MySQL for my database rather than Microsoft Access because our school uses it.\nBelow is a sample code of the source code :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ePublic Sub FillDGVWithReceiptInfo(ByVal DGV As DataGridView)\n DGV.Rows.Clear()\n Dim TA As New POSDSTableAdapters.ItemsTableAdapter\n\n For i = 0 To Me.ReceiptDetailsList.Count - 1\n Dim T1 = Me.ReceiptDetailsList(i).Barcode\n Dim T2 = Me.ReceiptDetailsList(i).ItemBuyPrice\n Dim T3 = Me.ReceiptDetailsList(i).ItemCount\n Dim T4 = Me.ReceiptDetailsList(i).ItemSellPrice\n Dim T5 = T3 * T4\n Dim T6 = TA.GetDataByBarcode(T1).Rows(0).Item(\"ItemName\")\n\n DGV.Rows.Add(T1, T6, T2, T4, T3, T5)\n\n Next\n End Sub\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei am trying to convert it to an \"OdBC\" kind of format. 
so i came up with this (also, this is the part where i get some error) :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e Public Sub FillDGVWithReceiptInfo(ByVal DGV As DataGridView)\n DGV.Rows.Clear()\n\n For i = 0 To Me.ReceiptDetailsList.Count - 1\n Dim T1 = Me.ReceiptDetailsList(i).ganoProdID\n Dim T3 = Me.ReceiptDetailsList(i).ItemCount\n Dim T4 = Me.ReceiptDetailsList(i).ganoItemPrice\n Dim T5 = T3 * T4\n\n Dim TA As New OdbcDataAdapter(\"SELECT * FROM gano_inventory WHERE gano_proID = \" \u0026amp; T1 \u0026amp; \";\", conn)\n Dim R As New DataTable\n TA.Fill(R)\n\n Dim T6 = R.Rows(0).Item(\"gano_item\")\n\n DGV.Rows.Add(T1, T6, T4, T3, T5)\n\n Next\n End Sub\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethis is the code's error :\n\u003cem\u003eNo row can be added to a DataGridView control that does not have columns. Columns must be added first.\u003c/em\u003e in this line : \u003cstrong\u003eDGV.Rows.Add(T1, T6, T4, T3, T5)\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003ecan someone please help me with it? thank you in advance!\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2012-09-30 11:21:01.953 UTC","last_activity_date":"2013-06-18 21:27:31.37 UTC","last_edit_date":"2012-09-30 13:34:48.07 UTC","last_editor_display_name":"","last_editor_user_id":"1643554","owner_display_name":"","owner_user_id":"1643554","post_type_id":"1","score":"0","tags":"vb.net|datagridview|odbc","view_count":"644"} -{"id":"33016898","title":"Elasticsearch request optimisation (strange script_score in Java API with bool query)","body":"\u003cp\u003eWith Elasticsearch 1.7.0, I'd like to make a query on a text field of my documents. 
I need to get all the documents which:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003cp\u003ematch partially (all the word needs to exist with synonyms et fuzzy)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch fuzzy (all the word needs to exist + fuzzy + phonetic)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch related (50% of the word need to be found)\u003c/p\u003e\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eI made a Java program with 3 Elasticsearch requests but those queries were too long so I've tried to use one query for all that:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": \n {\"bool\": {\n \"should\": [\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.syn\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"1\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"3\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": \"sorbone\",\n \"operator\": \"or\", \n \"minimum_should_match\": \"50%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"7\"\n }\n }\n }\n ]\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe idea is to use a bool_query with a specific score for each document returned. 
It works well but when I try to convert it using Java API, I have a score strangely calculated, instead there are decimals in the score and I was waiting to have numbers like 7 3 1 4 10 8 which correspond to sum of score.\u003c/p\u003e\n\n\u003cp\u003eThe code I used:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e .operator(org.elasticsearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsPhon = matchQuery(\"text.phonetic\", \"sorbonne\")\n .operator(org.elasticsearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsText = matchQuery(\"text\", \"sorbonne\")\n .operator(org.elasticsearch.index.query.MatchQueryBuilder.Operator.OR)\n .minimumShouldMatch(\"50%\");\n QueryBuilder functionScorePartial = functionScoreQuery(termsSyn)\n .add(ScoreFunctionBuilders.scriptFunction(\"1\"))\n .boostMode(\"replace\"); \n\n\nQueryBuilder functionScoreFuzzy = functionScoreQuery(termsPhon)\n .add(ScoreFunctionBuilders.scriptFunction(\"7\"))\n .boostMode(\"replace\"); \n\nQueryBuilder functionScoreRelated = functionScoreQuery(termsText)\n .add(ScoreFunctionBuilders.scriptFunction(\"15\"))\n .boostMode(\"replace\")\n ; \n\nQueryBuilder boolQ = boolQuery()\n .should(functionScorePartial)\n .should(functionScoreFuzzy)\n .should(functionScoreRelated);\n\nsqb.setQuery(boolQ);\n\n\nSearchResponse response = sqb.execute().actionGet();\nSearchHits hits = response.getHits();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I look to the generated JSON I see that the script function is not generated the same way. 
In the original REST I've got:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"functions\" : [ {\n \"script_score\" : {\n \"script\" : \"1\"\n }\n } ],\n \"boost_mode\" : \"replace\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIn the generated JSON, there's no \"functions\" array:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e \"script_score\": {\n \"script\": \"1\"\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs it a bug in the Elasticsearch Java API?\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2015-10-08 13:12:43.48 UTC","last_activity_date":"2015-10-13 17:38:20.99 UTC","last_edit_date":"2015-10-08 13:46:35.513 UTC","last_editor_display_name":"","last_editor_user_id":"880772","owner_display_name":"","owner_user_id":"5061275","post_type_id":"1","score":"0","tags":"java|elasticsearch","view_count":"193"} +{"id":"33016898","title":"OpenSearchrequest optimisation (strange script_score in Java API with bool query)","body":"\u003cp\u003eWith OpenSearch1.7.0, I'd like to make a query on a text field of my documents. 
I need to get all the documents which:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003cp\u003ematch partially (all the word needs to exist with synonyms et fuzzy)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch fuzzy (all the word needs to exist + fuzzy + phonetic)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch related (50% of the word need to be found)\u003c/p\u003e\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eI made a Java program with 3 OpenSearchrequests but those queries were too long so I've tried to use one query for all that:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": \n {\"bool\": {\n \"should\": [\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.syn\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"1\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"3\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": \"sorbone\",\n \"operator\": \"or\", \n \"minimum_should_match\": \"50%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"7\"\n }\n }\n }\n ]\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe idea is to use a bool_query with a specific score for each document returned. 
It works well but when I try to convert it using Java API, I have a score strangely calculated, instead there are decimals in the score and I was waiting to have numbers like 7 3 1 4 10 8 which correspond to sum of score.\u003c/p\u003e\n\n\u003cp\u003eThe code I used:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsPhon = matchQuery(\"text.phonetic\", \"sorbonne\")\n .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsText = matchQuery(\"text\", \"sorbonne\")\n .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.OR)\n .minimumShouldMatch(\"50%\");\n QueryBuilder functionScorePartial = functionScoreQuery(termsSyn)\n .add(ScoreFunctionBuilders.scriptFunction(\"1\"))\n .boostMode(\"replace\"); \n\n\nQueryBuilder functionScoreFuzzy = functionScoreQuery(termsPhon)\n .add(ScoreFunctionBuilders.scriptFunction(\"7\"))\n .boostMode(\"replace\"); \n\nQueryBuilder functionScoreRelated = functionScoreQuery(termsText)\n .add(ScoreFunctionBuilders.scriptFunction(\"15\"))\n .boostMode(\"replace\")\n ; \n\nQueryBuilder boolQ = boolQuery()\n .should(functionScorePartial)\n .should(functionScoreFuzzy)\n .should(functionScoreRelated);\n\nsqb.setQuery(boolQ);\n\n\nSearchResponse response = sqb.execute().actionGet();\nSearchHits hits = response.getHits();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I look to the generated JSON I see that the script function is not generated the same way. 
In the original REST I've got:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"functions\" : [ {\n \"script_score\" : {\n \"script\" : \"1\"\n }\n } ],\n \"boost_mode\" : \"replace\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIn the generated JSON, there's no \"functions\" array:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e \"script_score\": {\n \"script\": \"1\"\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs it a bug in the OpenSearchJava API?\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2015-10-08 13:12:43.48 UTC","last_activity_date":"2015-10-13 17:38:20.99 UTC","last_edit_date":"2015-10-08 13:46:35.513 UTC","last_editor_display_name":"","last_editor_user_id":"880772","owner_display_name":"","owner_user_id":"5061275","post_type_id":"1","score":"0","tags":"java|opensearch","view_count":"193"} {"id":"6719069","title":"WP7 7.1 (Mango) Database Versioning","body":"\u003cp\u003eI have a class structure that's saved to the local SQL DB on the phone. In the next version of the app, the class structure has changed.\u003c/p\u003e\n\n\u003cp\u003eHow does the SQL DB deserialize the data into the changed objects/structure?\u003c/p\u003e","accepted_answer_id":"6748873","answer_count":"1","comment_count":"0","creation_date":"2011-07-16 17:34:45.21 UTC","last_activity_date":"2011-07-28 23:27:26.86 UTC","last_edit_date":"2011-07-28 23:27:26.86 UTC","last_editor_display_name":"","last_editor_user_id":"149573","owner_display_name":"","owner_user_id":"68499","post_type_id":"1","score":"0","tags":"database|windows-phone-7","view_count":"242"} {"id":"37657474","title":"Google classroom api doesn't return the alias when a course is created","body":"\u003cp\u003eBackground: We currently have a database with every course, teacher and student in our school board. 
I am basically trying to build a system to sync this with our Google Classroom environment, so every teacher will have their courses, students will be enrolled ect.\u003c/p\u003e\n\n\u003cp\u003eProblem: We have over 8000 courses to create and want to use the batch system or at least create them asynchronously. We pass our internal unique course ID in the create call through the alias. However in the callback method this value is not passed back. This means we have no way of linking the google unique ID to ours, and no way of knowing if something goes wrong, which courses were not created.\u003c/p\u003e\n\n\u003cp\u003eExample: I want to create 5 courses with the following ids:\n1234\n1235\n1236\n1237\n1238\u003c/p\u003e\n\n\u003cp\u003eSo I create a batch request and the call back gets called 5 times. The data in the call back does not contain the IDs I sent in though if only contains the google IDs:\u003c/p\u003e\n\n\u003cp\u003e9876\n9875\nError\n9873\n9872\u003c/p\u003e\n\n\u003cp\u003eThe API specifically mentions that the order cannot be trusted. So how can I tell which google ID belong to which course and how can I tell witch course had the error?\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2016-06-06 12:27:08.82 UTC","last_activity_date":"2016-08-10 19:10:24.06 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5326876","post_type_id":"1","score":"0","tags":"google-classroom","view_count":"74"} {"id":"3761021","title":"Toolbar package missing in sdk 6.0","body":"\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/AxN3P.png\" alt=\"alt text\"\u003e\u003c/p\u003e\n\n\u003cp\u003eI can not use the toolbar lib even with sdk 6.0\u003c/p\u003e\n\n\u003cp\u003eI am using 6.0\u003c/p\u003e\n\n\u003cp\u003eCan anyone help me .. 
i m stuck here \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2010-09-21 13:55:38.947 UTC","last_activity_date":"2010-09-21 15:34:28.76 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"410693","post_type_id":"1","score":"0","tags":"java|blackberry","view_count":"36"} diff --git a/test/integration/README.md b/test/integration/README.md index cdda897a7..a89863b30 100644 --- a/test/integration/README.md +++ b/test/integration/README.md @@ -1,13 +1,13 @@ -# `elasticsearch-js` integration test suite +# `opensearch-js` integration test suite > What? A README to explain how the integration test work?? Yes. ## Background -Elasticsearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/api).
-To support different languages at the same time, the Elasticsearch team decided to provide a [YAML specification](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.
-This testing suite uses that specification to generate the test for the specified version of Elasticsearch on the fly. +OpenSearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/api).
+To support different languages at the same time, the OpenSearch team decided to provide a [YAML specification](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.
+This testing suite uses that specification to generate the test for the specified version of OpenSearch on the fly. ## Run Run the testing suite is very easy, you just need to run the preconfigured npm script: @@ -15,8 +15,8 @@ Run the testing suite is very easy, you just need to run the preconfigured npm s npm run test:integration ``` -The first time you run this command, the Elasticsearch repository will be cloned inside the integration test folder, to be able to access the YAML specification, so it might take some time *(luckily, only the first time)*.
-Once the Elasticsearch repository has been cloned, the testing suite will connect to the provided Elasticsearch instance and then checkout the build hash in the repository. Finally, it will start running every test. +The first time you run this command, the OpenSearch repository will be cloned inside the integration test folder, to be able to access the YAML specification, so it might take some time *(luckily, only the first time)*.
+Once the OpenSearch repository has been cloned, the testing suite will connect to the provided OpenSearch instance and then checkout the build hash in the repository. Finally, it will start running every test. The specification does not allow the test to be run in parallel, so it might take a while to run the entire testing suite; on my machine, `MacBookPro15,2 core i7 2.7GHz 16GB of RAM` it takes around four minutes. @@ -34,8 +40,8 @@ npm run test:integration -- --cov --coverage-report=html ## How does this thing work? At first sight, it might seem complicated, but once you understand what the moving parts are, it's quite easy. -1. Connects to the given Elasticsearch instance -1. Gets the ES version and build hash +1. Connects to the given OpenSearch instance +1. Gets the opensearch version and build hash 1. Checkout to the given hash (and clone the repository if it is not present) 1. Reads the folder list and for each folder the yaml file list 1. Starts running folder by folder every file diff --git a/test/integration/index.js b/test/integration/index.js index f4e831abc..a0c01fc39 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -55,11 +55,11 @@ const ossSkips = { 'cat.indices/10_basic.yml': ['Test cat indices output for closed index (pre 7.2.0)'], 'cluster.health/10_basic.yml': ['cluster health with closed index (pre 7.2.0)'], // TODO: remove this once 'arbitrary_key' is implemented - // https://github.com/elastic/elasticsearch/pull/41492 + // https://github.com/opensearch-project/opensearch/pull/41492 'indices.split/30_copy_settings.yml': ['*'], 'indices.stats/50_disk_usage.yml': ['Disk usage stats'], 'indices.stats/60_field_usage.yml': ['Field usage stats'], - // skipping because we are booting ES with `discovery.type=single-node` + // skipping because we are booting opensearch // and this test will fail because of this configuration 'nodes.stats/30_discovery.yml': ['*'], // the expected error is 
returning a 503, @@ -97,7 +97,7 @@ async function waitCluster (client, times = 0) { } async function start ({ client }) { - log('Waiting for Elasticsearch') + log('Waiting for OpenSearch') await waitCluster(client) const { body } = await client.info() @@ -277,7 +277,7 @@ const shouldSkip = (file, name) => { const ossTest = ossSkips[list[i]] for (let j = 0; j < ossTest.length; j++) { if (file.endsWith(list[i]) && (name === ossTest[j] || ossTest[j] === '*')) { - const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name + const testName = file.slice(file.indexOf(`${sep}opensearch${sep}`)) + ' / ' + name log(`Skipping test ${testName} because is blacklisted in the oss test`) return true } diff --git a/test/integration/test-runner.js b/test/integration/test-runner.js index ce199342f..d07c6e934 100644 --- a/test/integration/test-runner.js +++ b/test/integration/test-runner.js @@ -511,7 +511,7 @@ function match (val1, val2, action) { // both values are objects if (typeof val1 === 'object' && typeof val2 === 'object') { assert.ok(deepEqual(val1, val2), action) - // the first value is the body as string and the second a pattern string + // the first value is the body as string and the second a pattern string } else if ( typeof val1 === 'string' && typeof val2 === 'string' && val2.startsWith('/') && (val2.endsWith('/\n') || val2.endsWith('/')) @@ -528,7 +528,7 @@ function match (val1, val2, action) { // 'm' adds the support for multiline regex assert.ok(new RegExp(regStr, 'm').test(val1), `should match pattern provided: ${val2}, action: ${JSON.stringify(action)}`) // tap.match(val1, new RegExp(regStr, 'm'), `should match pattern provided: ${val2}, action: ${JSON.stringify(action)}`) - // everything else + // everything else } else { assert.equal(val1, val2, `should be equal: ${val1} - ${val2}, action: ${JSON.stringify(action)}`) } @@ -766,13 +766,13 @@ function shouldSkip (esVersion, action) { // if both `min` and `max` are specified if (min && 
max) { shouldSkip = semver.satisfies(esVersion, action.version) - // if only `min` is specified + // if only `min` is specified } else if (min) { shouldSkip = semver.gte(esVersion, min) - // if only `max` is specified + // if only `max` is specified } else if (max) { shouldSkip = semver.lte(esVersion, max) - // something went wrong! + // something went wrong! } else { throw new Error(`skip: Bad version range: ${action.version}`) } diff --git a/test/types/client-options.test-d.ts b/test/types/client-options.test-d.ts index 5e39949d0..7376edb70 100644 --- a/test/types/client-options.test-d.ts +++ b/test/types/client-options.test-d.ts @@ -464,7 +464,7 @@ expectError( expectType( new Client({ node: 'http://localhost:9200', - generateRequestId (params, options) { + generateRequestId(params, options) { return 'id' } }) @@ -484,7 +484,7 @@ expectError( expectType( new Client({ node: 'http://localhost:9200', - nodeSelector (connections) { + nodeSelector(connections) { return connections[0] } }) @@ -501,7 +501,7 @@ expectError( // @ts-expect-error new Client({ node: 'http://localhost:9200', - nodeSelector (connections) { + nodeSelector(connections) { return 'id' } }) @@ -513,7 +513,7 @@ expectError( expectType( new Client({ node: 'http://localhost:9200', - nodeFilter (connection) { + nodeFilter(connection) { return true } }) @@ -523,7 +523,7 @@ expectError( // @ts-expect-error new Client({ node: 'http://localhost:9200', - nodeFilter (connection) { + nodeFilter(connection) { return 'id' } }) @@ -534,7 +534,7 @@ expectError( */ { class CustomSerializer extends Serializer { - deserialize (str: string) { + deserialize(str: string) { return super.deserialize(str) } } @@ -552,7 +552,7 @@ expectError( */ { class CustomConnection extends Connection { - close () { + close() { return super.close() } } @@ -567,7 +567,7 @@ expectError( { class CustomConnection { - close () { + close() { return Promise.resolve() } } @@ -586,7 +586,7 @@ expectError( */ { class CustomConnectionPool 
extends ConnectionPool { - empty () { + empty() { return super.empty() } } @@ -601,7 +601,7 @@ expectError( { class CustomConnectionPool { - empty () { + empty() { return this } } @@ -620,7 +620,7 @@ expectError( */ { class CustomTransport extends Transport { - getConnection (opts: TransportGetConnectionOptions) { + getConnection(opts: TransportGetConnectionOptions) { return super.getConnection(opts) } } @@ -635,7 +635,7 @@ expectError( { class CustomTransport { - getConnection (opts: TransportGetConnectionOptions) { + getConnection(opts: TransportGetConnectionOptions) { return null } } diff --git a/test/types/connection-pool.test-d.ts b/test/types/connection-pool.test-d.ts index 3a61b1998..efd44d6cc 100644 --- a/test/types/connection-pool.test-d.ts +++ b/test/types/connection-pool.test-d.ts @@ -53,8 +53,8 @@ import { expectType(pool.markAlive(new Connection())) expectType(pool.markDead(new Connection())) expectType(pool.getConnection({ - filter (node) { return true }, - selector (connections) { return connections[0] }, + filter(node) { return true }, + selector(connections) { return connections[0] }, requestId: 'id', name: 'name', now: Date.now() @@ -87,8 +87,8 @@ import { expectAssignable(pool.markAlive(new Connection())) expectAssignable(pool.markDead(new Connection())) expectType(pool.getConnection({ - filter (node) { return true }, - selector (connections) { return connections[0] }, + filter(node) { return true }, + selector(connections) { return connections[0] }, requestId: 'id', name: 'name', now: Date.now() diff --git a/test/types/connection.test-d.ts b/test/types/connection.test-d.ts index 0b3a19e12..7b855afe4 100644 --- a/test/types/connection.test-d.ts +++ b/test/types/connection.test-d.ts @@ -57,7 +57,7 @@ import { ConnectionOptions } from '../../lib/Connection' { const conn = new Connection({ url: new URL('http://localhost:9200'), - agent (opts) { + agent(opts) { expectType(opts) return 'the agent' } diff --git a/test/types/errors.test-d.ts 
b/test/types/errors.test-d.ts index fee5fae39..9fffcc33d 100644 --- a/test/types/errors.test-d.ts +++ b/test/types/errors.test-d.ts @@ -51,7 +51,7 @@ const response = { } { - const err = new errors.ElasticsearchClientError() + const err = new errors.OpenSearchClientError() expectType(err.name) expectType(err.message) } diff --git a/test/types/helpers.test-d.ts b/test/types/helpers.test-d.ts index 9ad34a866..4df07fe76 100644 --- a/test/types/helpers.test-d.ts +++ b/test/types/helpers.test-d.ts @@ -48,7 +48,7 @@ const client = new Client({ const b = client.helpers.bulk>({ datasource: [], - onDocument (doc) { + onDocument(doc) { expectType>(doc) return { index: { _index: 'test' } } }, @@ -57,7 +57,7 @@ const b = client.helpers.bulk>({ concurrency: 5, retries: 3, wait: 5000, - onDrop (doc) { + onDrop(doc) { expectType>>(doc) }, refreshOnCompletion: true, @@ -72,7 +72,7 @@ b.then(stats => expectType(stats)) expectError( client.helpers.bulk({ datasource: [], - onDocument (doc) { + onDocument(doc) { return { index: { _index: 'test' } } }, body: [] @@ -84,7 +84,7 @@ expectError( { const options = { datasource: [], - onDocument (doc: Record) { + onDocument(doc: Record) { return { index: { _index: 'test' } } } } @@ -94,7 +94,7 @@ expectError( { const options = { datasource: [], - onDocument (doc: Record) { + onDocument(doc: Record) { return { create: { _index: 'test' } } } } @@ -107,7 +107,7 @@ expectError( // a `.helper.bulk`, it works as expected const options: BulkHelperOptions> = { datasource: [], - onDocument (doc: Record) { + onDocument(doc: Record) { return [{ update: { _index: 'test' } }, doc] } } @@ -117,7 +117,7 @@ expectError( { const options = { datasource: [], - onDocument (doc: Record) { + onDocument(doc: Record) { return { delete: { _index: 'test' } } } } @@ -128,7 +128,7 @@ expectError( // just search params { - async function test () { + async function test() { const scrollSearch = client.helpers.scrollSearch({ index: 'test', body: { @@ -146,7 +146,7 @@ 
expectError( // search params and options { - async function test () { + async function test() { const scrollSearch = client.helpers.scrollSearch({ index: 'test', body: { @@ -210,7 +210,7 @@ expectError( foo: string } - async function test () { + async function test() { const scrollSearch = client.helpers.scrollSearch>({ index: 'test', body: { @@ -279,7 +279,7 @@ expectError( foo: string } - async function test () { + async function test() { const scrollSearch = client.helpers.scrollSearch, SearchBody, Record>({ index: 'test', body: { @@ -302,7 +302,7 @@ expectError( // just search params { - async function test () { + async function test() { const scrollDocuments = client.helpers.scrollDocuments({ index: 'test', body: { @@ -320,7 +320,7 @@ expectError( // search params and options { - async function test () { + async function test() { const scrollDocuments = client.helpers.scrollDocuments({ index: 'test', body: { @@ -342,7 +342,7 @@ expectError( foo: string } - async function test () { + async function test() { const scrollDocuments = client.helpers.scrollDocuments({ index: 'test', body: { @@ -369,7 +369,7 @@ expectError( foo: string } - async function test () { + async function test() { const scrollDocuments = client.helpers.scrollDocuments({ index: 'test', body: { @@ -474,14 +474,14 @@ expectType(s) expectType(s.stop()) expectType(s.stop(new Error('kaboom'))) -expectType, unknown>>>(s.search({ index: 'foo'}, { query: {} })) -expectType>>(s.search, string>({ index: 'foo'}, { query: {} })) +expectType, unknown>>>(s.search({ index: 'foo' }, { query: {} })) +expectType>>(s.search, string>({ index: 'foo' }, { query: {} })) -expectType(s.search({ index: 'foo'}, { query: {} }, (err, result) => { +expectType(s.search({ index: 'foo' }, { query: {} }, (err, result) => { expectType(err) expectType(result) })) -expectType(s.search, string>({ index: 'foo'}, { query: {} }, (err, result) => { +expectType(s.search, string>({ index: 'foo' }, { query: {} }, (err, result) => { 
expectType(err) expectType>(result) })) diff --git a/test/types/kibana.test-d.ts b/test/types/kibana.test-d.ts deleted file mode 100644 index 44e3d74b5..000000000 --- a/test/types/kibana.test-d.ts +++ /dev/null @@ -1,127 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { expectType, expectNotType, expectError } from 'tsd' -import { Client, RequestEvent, ResurrectEvent, ApiError, ApiResponse, estypes } from '../../' -import { KibanaClient } from '../../api/kibana' -import { TransportRequestPromise, Context } from '../../lib/Transport' - -// @ts-expect-error -const client: KibanaClient = new Client({ - node: 'http://localhost:9200' -}) - -client.on('request', (err, meta) => { - expectType(err) - expectType(meta) -}) - -client.on('response', (err, meta) => { - expectType(err) - expectType(meta) -}) - -client.on('sniff', (err, meta) => { - expectType(err) - expectType(meta) -}) - -client.on('resurrect', (err, meta) => { - expectType(err) - expectType(meta) -}) - -// No generics -{ - const response = await client.cat.count({ index: 'test' }) - - expectType(response.body) - expectType(response.meta.context) -} - -// Define only the context -{ - const response = await client.cat.count({ index: 'test' }) - - expectType(response.body) - expectType(response.meta.context) -} - -// Check API returned type and optional parameters -{ - const promise = client.info() - expectType>>(promise) - promise - .then(result => expectType>(result)) - .catch((err: ApiError) => expectType(err)) - expectType(promise.abort()) -} - -{ - const promise = client.info({ pretty: true }) - expectType>>(promise) - promise - .then(result => expectType>(result)) - .catch((err: ApiError) => expectType(err)) - expectType(promise.abort()) -} - -{ - const promise = client.info({ pretty: true }, { ignore: [404] }) - expectType>>(promise) - promise - .then(result => expectType>(result)) - .catch((err: ApiError) => expectType(err)) - expectType(promise.abort()) -} - -// body that does not respect the RequestBody constraint -expectError( - client.search({ - index: 'hello', - body: 42 - }).then(console.log) -) - -// @ts-expect-error -client.async_search.get() - -// callback api is not supported -expectError(client.cat.count({ index: 'test' }, {}, (err: any, 
result: any) => {})) - -// close api, only promises should be supported -// callback api is not supported -expectType>(client.close()) -expectError(client.close(() => {})) - -// the child api should return a KibanaClient instance -const child = client.child() -expectType(child) -expectNotType(child) \ No newline at end of file diff --git a/test/types/new-types.test-d.ts b/test/types/new-types.test-d.ts index 787a1b2ee..ffc6d3243 100644 --- a/test/types/new-types.test-d.ts +++ b/test/types/new-types.test-d.ts @@ -29,7 +29,7 @@ */ import { expectType, expectNotType, expectError } from 'tsd' -import { Client, RequestEvent, ResurrectEvent, ApiError, ApiResponse, estypes } from '../../' +import { Client, RequestEvent, ResurrectEvent, ApiError, ApiResponse, ostypes } from '../../' import type { Client as NewTypes } from '../../api/new' import { TransportRequestPromise, Context } from '../../lib/Transport' @@ -62,7 +62,7 @@ client.on('resurrect', (err, meta) => { { const response = await client.cat.count({ index: 'test' }) - expectType(response.body) + expectType(response.body) expectType(response.meta.context) } @@ -70,34 +70,34 @@ client.on('resurrect', (err, meta) => { { const response = await client.cat.count({ index: 'test' }) - expectType(response.body) + expectType(response.body) expectType(response.meta.context) } // Check API returned type and optional parameters { const promise = client.info() - expectType>>(promise) + expectType>>(promise) promise - .then(result => expectType>(result)) + .then(result => expectType>(result)) .catch((err: ApiError) => expectType(err)) expectType(promise.abort()) } { const promise = client.info({ pretty: true }) - expectType>>(promise) + expectType>>(promise) promise - .then(result => expectType>(result)) + .then(result => expectType>(result)) .catch((err: ApiError) => expectType(err)) expectType(promise.abort()) } { const promise = client.info({ pretty: true }, { ignore: [404] }) - expectType>>(promise) + expectType>>(promise) 
promise - .then(result => expectType>(result)) + .then(result => expectType>(result)) .catch((err: ApiError) => expectType(err)) expectType(promise.abort()) } @@ -113,7 +113,7 @@ expectError( // @ts-expect-error client.async_search.get() -// the child api should return a KibanaClient instance +// the child api should return a OpenSearchDashboardsClient instance const child = client.child() expectType(child) expectNotType(child) \ No newline at end of file diff --git a/test/types/transport.test-d.ts b/test/types/transport.test-d.ts index 574c1489c..c95323c85 100644 --- a/test/types/transport.test-d.ts +++ b/test/types/transport.test-d.ts @@ -127,7 +127,7 @@ const transport = new Transport({ expectType(transport) -expectType(transport.request(params, options, (err, result) => {})) +expectType(transport.request(params, options, (err, result) => { })) // querystring as string transport.request({ diff --git a/test/unit/client.test.js b/test/unit/client.test.js index 377234635..5b782d02f 100644 --- a/test/unit/client.test.js +++ b/test/unit/client.test.js @@ -649,14 +649,14 @@ test('Extend client APIs', t => { t.end() }) -test('Elastic cloud config', t => { +test('opensearch cloud config', t => { t.test('Basic', t => { t.plan(5) const client = new Client({ cloud: { // 'localhost$abcd$efgh' id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==', - username: 'elastic', + username: 'opensearch', password: 'changeme' } }) @@ -664,10 +664,10 @@ test('Elastic cloud config', t => { const pool = client.connectionPool t.ok(pool instanceof CloudConnectionPool) t.match(pool.connections.find(c => c.id === 'https://abcd.localhost/'), { - url: new URL('https://elastic:changeme@abcd.localhost'), + url: new URL('https://opensearch:changeme@abcd.localhost'), id: 'https://abcd.localhost/', headers: { - authorization: 'Basic ' + Buffer.from('elastic:changeme').toString('base64') + authorization: 'Basic ' + Buffer.from('opensearch:changeme').toString('base64') }, ssl: { secureProtocol: 'TLSv1_2_method' }, 
deadCount: 0, @@ -684,13 +684,13 @@ test('Elastic cloud config', t => { t.same(pool._ssl, { secureProtocol: 'TLSv1_2_method' }) }) - t.test('Without kibana component', t => { + t.test('Without opensearchDashboards component', t => { t.plan(5) const client = new Client({ cloud: { // 'localhost$abcd$' id: 'name:bG9jYWxob3N0JGFiY2Qk', - username: 'elastic', + username: 'opensearch', password: 'changeme' } }) @@ -698,10 +698,10 @@ test('Elastic cloud config', t => { const pool = client.connectionPool t.ok(pool instanceof CloudConnectionPool) t.match(pool.connections.find(c => c.id === 'https://abcd.localhost/'), { - url: new URL('https://elastic:changeme@abcd.localhost'), + url: new URL('https://opensearch:changeme@abcd.localhost'), id: 'https://abcd.localhost/', headers: { - authorization: 'Basic ' + Buffer.from('elastic:changeme').toString('base64') + authorization: 'Basic ' + Buffer.from('opensearch:changeme').toString('base64') }, ssl: { secureProtocol: 'TLSv1_2_method' }, deadCount: 0, @@ -726,7 +726,7 @@ test('Elastic cloud config', t => { id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==' }, auth: { - username: 'elastic', + username: 'opensearch', password: 'changeme' } }) @@ -734,10 +734,10 @@ test('Elastic cloud config', t => { const pool = client.connectionPool t.ok(pool instanceof CloudConnectionPool) t.match(pool.connections.find(c => c.id === 'https://abcd.localhost/'), { - url: new URL('https://elastic:changeme@abcd.localhost'), + url: new URL('https://opensearch:changeme@abcd.localhost'), id: 'https://abcd.localhost/', headers: { - authorization: 'Basic ' + Buffer.from('elastic:changeme').toString('base64') + authorization: 'Basic ' + Buffer.from('opensearch:changeme').toString('base64') }, ssl: { secureProtocol: 'TLSv1_2_method' }, deadCount: 0, @@ -760,7 +760,7 @@ test('Elastic cloud config', t => { cloud: { // 'localhost$abcd$efgh' id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==', - username: 'elastic', + username: 'opensearch', password: 'changeme' }, compression: 
false, @@ -1143,14 +1143,14 @@ test('API compatibility header (json)', t => { t.plan(4) function handler (req, res) { - t.equal(req.headers.accept, 'application/vnd.elasticsearch+json; compatible-with=7') - t.equal(req.headers['content-type'], 'application/vnd.elasticsearch+json; compatible-with=7') - res.setHeader('Content-Type', 'application/vnd.elasticsearch+json; compatible-with=7') + t.equal(req.headers.accept, 'application/vnd.opensearch+json; compatible-with=7') + t.equal(req.headers['content-type'], 'application/vnd.opensearch+json; compatible-with=7') + res.setHeader('Content-Type', 'application/vnd.opensearch+json; compatible-with=7') res.end(JSON.stringify({ hello: 'world' })) } buildServer(handler, ({ port }, server) => { - process.env.ELASTIC_CLIENT_APIVERSIONING = 'true' + process.env.OPENSEARCH_CLIENT_APIVERSIONING = 'true' const client = new Client({ node: `http://localhost:${port}` }) @@ -1159,7 +1159,7 @@ test('API compatibility header (json)', t => { t.error(err) t.same(body, { hello: 'world' }) server.stop() - delete process.env.ELASTIC_CLIENT_APIVERSIONING + delete process.env.OPENSEARCH_CLIENT_APIVERSIONING }) }) }) @@ -1168,14 +1168,14 @@ test('API compatibility header (x-ndjson)', t => { t.plan(4) function handler (req, res) { - t.equal(req.headers.accept, 'application/vnd.elasticsearch+json; compatible-with=7') - t.equal(req.headers['content-type'], 'application/vnd.elasticsearch+x-ndjson; compatible-with=7') - res.setHeader('Content-Type', 'application/vnd.elasticsearch+json; compatible-with=7') + t.equal(req.headers.accept, 'application/vnd.opensearch+json; compatible-with=7') + t.equal(req.headers['content-type'], 'application/vnd.opensearch+x-ndjson; compatible-with=7') + res.setHeader('Content-Type', 'application/vnd.opensearch+json; compatible-with=7') res.end(JSON.stringify({ hello: 'world' })) } buildServer(handler, ({ port }, server) => { - process.env.ELASTIC_CLIENT_APIVERSIONING = 'true' + 
process.env.OPENSEARCH_CLIENT_APIVERSIONING = 'true' const client = new Client({ node: `http://localhost:${port}` }) @@ -1184,7 +1184,7 @@ test('API compatibility header (x-ndjson)', t => { t.error(err) t.same(body, { hello: 'world' }) server.stop() - delete process.env.ELASTIC_CLIENT_APIVERSIONING + delete process.env.OPENSEARCH_CLIENT_APIVERSIONING }) }) }) diff --git a/test/unit/connection-pool.test.js b/test/unit/connection-pool.test.js index 445e52c69..a904a17fa 100644 --- a/test/unit/connection-pool.test.js +++ b/test/unit/connection-pool.test.js @@ -137,7 +137,7 @@ test('API', t => { const opts = { now: Date.now() + 1000 * 60 * 3, requestId: 1, - name: 'elasticsearch-js' + name: 'opensearch-js' } pool.resurrect(opts, (isAlive, connection) => { t.ok(isAlive) @@ -163,7 +163,7 @@ test('API', t => { const opts = { now: Date.now() + 1000 * 60 * 3, requestId: 1, - name: 'elasticsearch-js' + name: 'opensearch-js' } pool.resurrect(opts, (isAlive, connection) => { t.notOk(isAlive) @@ -191,7 +191,7 @@ test('API', t => { const opts = { now: Date.now() + 1000 * 60 * 3, requestId: 1, - name: 'elasticsearch-js' + name: 'opensearch-js' } pool.resurrect(opts, (isAlive, connection) => { t.ok(isAlive) @@ -216,7 +216,7 @@ test('API', t => { const opts = { now: Date.now() + 1000 * 60 * 3, requestId: 1, - name: 'elasticsearch-js' + name: 'opensearch-js' } pool.resurrect(opts, (isAlive, connection) => { t.ok(isAlive === null) diff --git a/test/unit/connection.test.js b/test/unit/connection.test.js index 7dfcd7719..67cb1a197 100644 --- a/test/unit/connection.test.js +++ b/test/unit/connection.test.js @@ -755,7 +755,7 @@ test('connection.toJSON should hide agent, ssl and auth', t => { }) }) -// https://github.com/elastic/elasticsearch-js/issues/843 +// https://github.com/opensearch-project/opensearch-js/issues/843 test('Port handling', t => { t.test('http 80', t => { const connection = new Connection({ diff --git a/test/unit/errors.test.js b/test/unit/errors.test.js index 
dd319de28..33a9cbc8d 100644 --- a/test/unit/errors.test.js +++ b/test/unit/errors.test.js @@ -35,8 +35,8 @@ const { test } = require('tap') const { errors } = require('../../index') -test('ElasticsearchClientError', t => { - const err = new errors.ElasticsearchClientError() +test('OpenSearchClientError', t => { + const err = new errors.OpenSearchClientError() t.ok(err instanceof Error) t.end() }) @@ -44,7 +44,7 @@ test('ElasticsearchClientError', t => { test('TimeoutError', t => { const err = new errors.TimeoutError() t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.ok(err.hasOwnProperty('meta')) t.end() }) @@ -52,7 +52,7 @@ test('TimeoutError', t => { test('ConnectionError', t => { const err = new errors.ConnectionError() t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.ok(err.hasOwnProperty('meta')) t.end() }) @@ -60,7 +60,7 @@ test('ConnectionError', t => { test('NoLivingConnectionsError', t => { const err = new errors.NoLivingConnectionsError() t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.ok(err.hasOwnProperty('meta')) t.end() }) @@ -68,7 +68,7 @@ test('NoLivingConnectionsError', t => { test('SerializationError', t => { const err = new errors.SerializationError() t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.notOk(err.hasOwnProperty('meta')) t.ok(err.hasOwnProperty('data')) t.end() @@ -77,7 +77,7 @@ test('SerializationError', t => { test('DeserializationError', t => { const err = new errors.DeserializationError() t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.notOk(err.hasOwnProperty('meta')) t.ok(err.hasOwnProperty('data')) 
t.end() @@ -86,7 +86,7 @@ test('DeserializationError', t => { test('ConfigurationError', t => { const err = new errors.ConfigurationError() t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.notOk(err.hasOwnProperty('meta')) t.end() }) @@ -99,7 +99,7 @@ test('ResponseError', t => { } const err = new errors.ResponseError(meta) t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.ok(err.hasOwnProperty('meta')) t.ok(err.body) t.ok(err.statusCode) @@ -110,7 +110,7 @@ test('ResponseError', t => { test('RequestAbortedError', t => { const err = new errors.RequestAbortedError() t.ok(err instanceof Error) - t.ok(err instanceof errors.ElasticsearchClientError) + t.ok(err instanceof errors.OpenSearchClientError) t.ok(err.hasOwnProperty('meta')) t.end() }) diff --git a/test/unit/esm/index.mjs b/test/unit/esm/index.mjs index 656278e04..f6060eeed 100644 --- a/test/unit/esm/index.mjs +++ b/test/unit/esm/index.mjs @@ -15,5 +15,5 @@ import { Client } from '../../../index.mjs' t.test('esm support', t => { t.plan(1) const client = new Client({ node: 'http://localhost:9200' }) - t.equal(client.name, 'elasticsearch-js') + t.equal(client.name, 'opensearch-js') }) diff --git a/test/unit/events.test.js b/test/unit/events.test.js index fbb4f547c..a9566ca5d 100644 --- a/test/unit/events.test.js +++ b/test/unit/events.test.js @@ -58,7 +58,7 @@ test('Should emit a request event when a request is performed', t => { warnings: null, meta: { context: null, - name: 'elasticsearch-js', + name: 'opensearch-js', request: { params: { method: 'GET', @@ -103,7 +103,7 @@ test('Should emit a request event once when a request is performed', t => { warnings: null, meta: { context: null, - name: 'elasticsearch-js', + name: 'opensearch-js', request: { params: { method: 'GET', @@ -156,7 +156,7 @@ test('Remove an event', t => { warnings: null, 
meta: { context: null, - name: 'elasticsearch-js', + name: 'opensearch-js', request: { params: { method: 'GET', @@ -213,7 +213,7 @@ test('Should emit a response event in case of a successful response', t => { warnings: null, meta: { context: null, - name: 'elasticsearch-js', + name: 'opensearch-js', request: { params: { method: 'GET', @@ -259,7 +259,7 @@ test('Should emit a response event with the error set', t => { warnings: null, meta: { context: null, - name: 'elasticsearch-js', + name: 'opensearch-js', request: { params: { method: 'GET', diff --git a/test/unit/transport.test.js b/test/unit/transport.test.js index a542fe949..2f081eb88 100644 --- a/test/unit/transport.test.js +++ b/test/unit/transport.test.js @@ -69,7 +69,7 @@ test('Basic', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -97,7 +97,7 @@ test('Basic (promises support)', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -125,7 +125,7 @@ test('Basic - failing (promises support)', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -152,7 +152,7 @@ test('Basic (options + promises support)', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -198,7 +198,7 @@ test('Send POST', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -255,7 +255,7 @@ test('Send POST (ndjson)', t => { 
pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -299,7 +299,7 @@ test('Send stream', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -343,7 +343,7 @@ test('Send stream (bulkBody)', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -377,7 +377,7 @@ test('Not JSON payload from server', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -404,7 +404,7 @@ test('NoLivingConnectionsError (null connection)', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -433,7 +433,7 @@ test('NoLivingConnectionsError (undefined connection)', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -462,7 +462,7 @@ test('SerializationError', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -489,7 +489,7 @@ test('SerializationError (bulk)', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -522,7 +522,7 @@ test('DeserializationError', t => { 
pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -559,7 +559,7 @@ test('TimeoutError (should call markDead on the failing connection)', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -594,7 +594,7 @@ test('ConnectionError (should call markDead on the failing connection)', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -641,7 +641,7 @@ test('Retry mechanism', t => { }]) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 1, @@ -686,7 +686,7 @@ test('Should not retry if the body is a stream', t => { }]) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 1, @@ -732,7 +732,7 @@ test('Should not retry if the bulkBody is a stream', t => { }]) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 1, @@ -778,7 +778,7 @@ test('No retry', t => { }]) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -830,7 +830,7 @@ test('Custom retry mechanism', t => { }]) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -877,7 +877,7 @@ test('Should not retry on 429', t => { }]) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 5, @@ -915,7 +915,7 @@ test('Should call markAlive with a successful response', t => { }) const transport = new 
Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -952,14 +952,14 @@ test('Should call resurrect on every request', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, requestTimeout: 30000, sniffInterval: false, sniffOnStart: false, - name: 'elasticsearch-js' + name: 'opensearch-js' }) skipCompatibleCheck(transport) @@ -982,7 +982,7 @@ test('Should return a request aborter utility', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1061,7 +1061,7 @@ test('Abort a request with the promise API', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1101,7 +1101,7 @@ test('ResponseError', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1137,7 +1137,7 @@ test('Override requestTimeout', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1175,7 +1175,7 @@ test('sniff', t => { // eslint-disable-next-line new MyTransport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1199,7 +1199,7 @@ test('sniff', t => { pool.addConnection('http://localhost:9200') const transport = new MyTransport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -1234,7 +1234,7 @@ test('sniff', t => { pool.addConnection('http://localhost:9200') const transport = new MyTransport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, 
serializer: new Serializer(), maxRetries: 3, @@ -1268,7 +1268,7 @@ test('sniff', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -1305,7 +1305,7 @@ test(`Should mark as dead connections where the statusCode is 502/3/4 pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -1360,7 +1360,7 @@ test('Should retry the request if the statusCode is 502/3/4', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 1, @@ -1392,7 +1392,7 @@ test('Ignore status code', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1442,7 +1442,7 @@ test('Should serialize the querystring', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1486,7 +1486,7 @@ test('timeout option', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -1517,7 +1517,7 @@ test('timeout option', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -1554,7 +1554,7 @@ test('timeout option', t => { }) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -1585,7 +1585,7 @@ test('timeout option', t => { }) const transport = new Transport({ - emit: () => {}, 
+ emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 0, @@ -1620,7 +1620,7 @@ test('Should cast to boolean HEAD request', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1646,7 +1646,7 @@ test('Should cast to boolean HEAD request', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1673,7 +1673,7 @@ test('Should cast to boolean HEAD request', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1699,7 +1699,7 @@ test('Should cast to boolean HEAD request', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1741,7 +1741,7 @@ test('Suggest compression', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1782,7 +1782,7 @@ test('Broken compression', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1819,7 +1819,7 @@ test('Warning header', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1857,7 +1857,7 @@ test('Warning header', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, 
connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1892,7 +1892,7 @@ test('Warning header', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1928,7 +1928,7 @@ test('asStream set to true', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -1987,7 +1987,7 @@ test('Compress request', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2035,7 +2035,7 @@ test('Compress request', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2082,7 +2082,7 @@ test('Compress request', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2111,7 +2111,7 @@ test('Compress request', t => { try { new Transport({ // eslint-disable-line - emit: () => {}, + emit: () => { }, connectionPool: new ConnectionPool({ Connection }), serializer: new Serializer(), maxRetries: 3, @@ -2141,7 +2141,7 @@ test('Compress request', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2209,7 +2209,7 @@ test('Compress request', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2251,7 +2251,7 @@ 
test('Headers configuration', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2291,7 +2291,7 @@ test('Headers configuration', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2330,7 +2330,7 @@ test('Headers configuration', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2366,7 +2366,7 @@ test('nodeFilter and nodeSelector', t => { pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2408,7 +2408,7 @@ test('Should accept custom querystring in the optons object', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2445,7 +2445,7 @@ test('Should accept custom querystring in the optons object', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2475,7 +2475,7 @@ test('Should accept custom querystring in the optons object', t => { test('Should add an User-Agent header', t => { t.plan(2) const clientVersion = require('../../package.json').version - const userAgent = `elasticsearch-js/${clientVersion} (${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})` + const userAgent = `opensearch-js/${clientVersion} (${os.platform()} ${os.release()}-${os.arch()}; Node.js ${process.version})` function handler 
(req, res) { t.match(req.headers, { @@ -2490,7 +2490,7 @@ test('Should add an User-Agent header', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2520,7 +2520,7 @@ test('Should pass request params and options to generateRequestId', t => { const options = { context: { winter: 'is coming' } } const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2551,7 +2551,7 @@ test('Secure json parsing', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2584,7 +2584,7 @@ test('Secure json parsing', t => { pool.addConnection(`http://localhost:${port}`) const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2643,7 +2643,7 @@ test('The callback with a sync error should be called in the next tick - json', pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, @@ -2675,7 +2675,7 @@ test('The callback with a sync error should be called in the next tick - ndjson' pool.addConnection('http://localhost:9200') const transport = new Transport({ - emit: () => {}, + emit: () => { }, connectionPool: pool, serializer: new Serializer(), maxRetries: 3, diff --git a/test/utils/buildCluster.js b/test/utils/buildCluster.js index 0b35e529b..39abbefa6 100644 --- a/test/utils/buildCluster.js +++ b/test/utils/buildCluster.js @@ -30,7 +30,7 @@ 'use strict' -const debug = require('debug')('elasticsearch-test') +const debug = require('debug')('opensearch-test') const workq = require('workq') const buildServer = 
require('./buildServer') diff --git a/test/utils/buildServer.js b/test/utils/buildServer.js index 4a6633028..6da842eed 100644 --- a/test/utils/buildServer.js +++ b/test/utils/buildServer.js @@ -30,7 +30,7 @@ 'use strict' -const debug = require('debug')('elasticsearch-test') +const debug = require('debug')('opensearch-test') const stoppable = require('stoppable') // allow self signed certificates for testing purposes diff --git a/test/utils/index.js b/test/utils/index.js index 8efb818e1..b98b0bbcf 100644 --- a/test/utils/index.js +++ b/test/utils/index.js @@ -56,7 +56,7 @@ async function waitCluster (client, waitForStatus = 'green', timeout = '50s', ti function skipCompatibleCheck (client) { const tSymbol = Object.getOwnPropertySymbols(client.transport || client) .filter(symbol => symbol.description === 'compatible check')[0] - ;(client.transport || client)[tSymbol] = 2 + ; (client.transport || client)[tSymbol] = 2 } class NoCompatibleCheckClient extends Client { diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 000000000..6848c679c --- /dev/null +++ b/yarn.lock @@ -0,0 +1,4519 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb" + integrity sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw== + dependencies: + "@babel/highlight" "^7.14.5" + +"@babel/compat-data@^7.14.7", "@babel/compat-data@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.15.0.tgz#2dbaf8b85334796cafbb0f5793a90a2fc010b176" + integrity sha512-0NqAC1IJE0S0+lL1SWFMxMkz1pKCNCjI4tr2Zx4LJSXxCLAdr6KyArnY+sno5m3yH9g737ygOyPABDsnXkpxiA== + +"@babel/core@^7.5.5", "@babel/core@^7.7.5": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.15.0.tgz#749e57c68778b73ad8082775561f67f5196aafa8" + integrity sha512-tXtmTminrze5HEUPn/a0JtOzzfp0nk+UEXQ/tqIJo3WDGypl/2OFQEMll/zSFU8f/lfmfLXvTaORHF3cfXIQMw== + dependencies: + "@babel/code-frame" "^7.14.5" + "@babel/generator" "^7.15.0" + "@babel/helper-compilation-targets" "^7.15.0" + "@babel/helper-module-transforms" "^7.15.0" + "@babel/helpers" "^7.14.8" + "@babel/parser" "^7.15.0" + "@babel/template" "^7.14.5" + "@babel/traverse" "^7.15.0" + "@babel/types" "^7.15.0" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.1.2" + semver "^6.3.0" + source-map "^0.5.0" + +"@babel/generator@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.15.0.tgz#a7d0c172e0d814974bad5aa77ace543b97917f15" + integrity sha512-eKl4XdMrbpYvuB505KTta4AV9g+wWzmVBW69tX0H2NwKVKd2YJbKgyK6M8j/rgLbmHOYJn6rUklV677nOyJrEQ== + dependencies: + "@babel/types" "^7.15.0" + jsesc "^2.5.1" + source-map "^0.5.0" + +"@babel/helper-annotate-as-pure@^7.14.5": + version "7.14.5" + resolved 
"https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.14.5.tgz#7bf478ec3b71726d56a8ca5775b046fc29879e61" + integrity sha512-EivH9EgBIb+G8ij1B2jAwSH36WnGvkQSEC6CkX/6v6ZFlw5fVOHvsgGF4uiEHO2GzMvunZb6tDLQEQSdrdocrA== + dependencies: + "@babel/types" "^7.14.5" + +"@babel/helper-compilation-targets@^7.14.5", "@babel/helper-compilation-targets@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.15.0.tgz#973df8cbd025515f3ff25db0c05efc704fa79818" + integrity sha512-h+/9t0ncd4jfZ8wsdAsoIxSa61qhBYlycXiHWqJaQBCXAhDCMbPRSMTGnZIkkmt1u4ag+UQmuqcILwqKzZ4N2A== + dependencies: + "@babel/compat-data" "^7.15.0" + "@babel/helper-validator-option" "^7.14.5" + browserslist "^4.16.6" + semver "^6.3.0" + +"@babel/helper-function-name@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.14.5.tgz#89e2c474972f15d8e233b52ee8c480e2cfcd50c4" + integrity sha512-Gjna0AsXWfFvrAuX+VKcN/aNNWonizBj39yGwUzVDVTlMYJMK2Wp6xdpy72mfArFq5uK+NOuexfzZlzI1z9+AQ== + dependencies: + "@babel/helper-get-function-arity" "^7.14.5" + "@babel/template" "^7.14.5" + "@babel/types" "^7.14.5" + +"@babel/helper-get-function-arity@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.14.5.tgz#25fbfa579b0937eee1f3b805ece4ce398c431815" + integrity sha512-I1Db4Shst5lewOM4V+ZKJzQ0JGGaZ6VY1jYvMghRjqs6DWgxLCIyFt30GlnKkfUeFLpJt2vzbMVEXVSXlIFYUg== + dependencies: + "@babel/types" "^7.14.5" + +"@babel/helper-hoist-variables@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.14.5.tgz#e0dd27c33a78e577d7c8884916a3e7ef1f7c7f8d" + integrity sha512-R1PXiz31Uc0Vxy4OEOm07x0oSjKAdPPCh3tPivn/Eo8cvz6gveAeuyUUPB21Hoiif0uoPQSSdhIPS3352nvdyQ== + dependencies: + "@babel/types" "^7.14.5" + 
+"@babel/helper-member-expression-to-functions@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.15.0.tgz#0ddaf5299c8179f27f37327936553e9bba60990b" + integrity sha512-Jq8H8U2kYiafuj2xMTPQwkTBnEEdGKpT35lJEQsRRjnG0LW3neucsaMWLgKcwu3OHKNeYugfw+Z20BXBSEs2Lg== + dependencies: + "@babel/types" "^7.15.0" + +"@babel/helper-module-imports@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.14.5.tgz#6d1a44df6a38c957aa7c312da076429f11b422f3" + integrity sha512-SwrNHu5QWS84XlHwGYPDtCxcA0hrSlL2yhWYLgeOc0w7ccOl2qv4s/nARI0aYZW+bSwAL5CukeXA47B/1NKcnQ== + dependencies: + "@babel/types" "^7.14.5" + +"@babel/helper-module-transforms@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.15.0.tgz#679275581ea056373eddbe360e1419ef23783b08" + integrity sha512-RkGiW5Rer7fpXv9m1B3iHIFDZdItnO2/BLfWVW/9q7+KqQSDY5kUfQEbzdXM1MVhJGcugKV7kRrNVzNxmk7NBg== + dependencies: + "@babel/helper-module-imports" "^7.14.5" + "@babel/helper-replace-supers" "^7.15.0" + "@babel/helper-simple-access" "^7.14.8" + "@babel/helper-split-export-declaration" "^7.14.5" + "@babel/helper-validator-identifier" "^7.14.9" + "@babel/template" "^7.14.5" + "@babel/traverse" "^7.15.0" + "@babel/types" "^7.15.0" + +"@babel/helper-optimise-call-expression@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.14.5.tgz#f27395a8619e0665b3f0364cddb41c25d71b499c" + integrity sha512-IqiLIrODUOdnPU9/F8ib1Fx2ohlgDhxnIDU7OEVi+kAbEZcyiF7BLU8W6PfvPi9LzztjS7kcbzbmL7oG8kD6VA== + dependencies: + "@babel/types" "^7.14.5" + +"@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.8.0": + version "7.14.5" + resolved 
"https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz#5ac822ce97eec46741ab70a517971e443a70c5a9" + integrity sha512-/37qQCE3K0vvZKwoK4XU/irIJQdIfCJuhU5eKnNxpFDsOkgFaUAwbv+RYw6eYgsC0E4hS7r5KqGULUogqui0fQ== + +"@babel/helper-replace-supers@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.15.0.tgz#ace07708f5bf746bf2e6ba99572cce79b5d4e7f4" + integrity sha512-6O+eWrhx+HEra/uJnifCwhwMd6Bp5+ZfZeJwbqUTuqkhIT6YcRhiZCOOFChRypOIe0cV46kFrRBlm+t5vHCEaA== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.15.0" + "@babel/helper-optimise-call-expression" "^7.14.5" + "@babel/traverse" "^7.15.0" + "@babel/types" "^7.15.0" + +"@babel/helper-simple-access@^7.14.8": + version "7.14.8" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.14.8.tgz#82e1fec0644a7e775c74d305f212c39f8fe73924" + integrity sha512-TrFN4RHh9gnWEU+s7JloIho2T76GPwRHhdzOWLqTrMnlas8T9O7ec+oEDNsRXndOmru9ymH9DFrEOxpzPoSbdg== + dependencies: + "@babel/types" "^7.14.8" + +"@babel/helper-split-export-declaration@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.14.5.tgz#22b23a54ef51c2b7605d851930c1976dd0bc693a" + integrity sha512-hprxVPu6e5Kdp2puZUmvOGjaLv9TCe58E/Fl6hRq4YiVQxIcNvuq6uTM2r1mT/oPskuS9CgR+I94sqAYv0NGKA== + dependencies: + "@babel/types" "^7.14.5" + +"@babel/helper-validator-identifier@^7.14.5", "@babel/helper-validator-identifier@^7.14.9": + version "7.14.9" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.9.tgz#6654d171b2024f6d8ee151bf2509699919131d48" + integrity sha512-pQYxPY0UP6IHISRitNe8bsijHex4TWZXi2HwKVsjPiltzlhse2znVcm9Ace510VT1kxIHjGJCZZQBX2gJDbo0g== + +"@babel/helper-validator-option@^7.14.5": + version "7.14.5" + resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.14.5.tgz#6e72a1fff18d5dfcb878e1e62f1a021c4b72d5a3" + integrity sha512-OX8D5eeX4XwcroVW45NMvoYaIuFI+GQpA2a8Gi+X/U/cDUIRsV37qQfF905F0htTRCREQIB4KqPeaveRJUl3Ow== + +"@babel/helpers@^7.14.8": + version "7.14.8" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.14.8.tgz#839f88f463025886cff7f85a35297007e2da1b77" + integrity sha512-ZRDmI56pnV+p1dH6d+UN6GINGz7Krps3+270qqI9UJ4wxYThfAIcI5i7j5vXC4FJ3Wap+S9qcebxeYiqn87DZw== + dependencies: + "@babel/template" "^7.14.5" + "@babel/traverse" "^7.14.8" + "@babel/types" "^7.14.8" + +"@babel/highlight@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.5.tgz#6861a52f03966405001f6aa534a01a24d99e8cd9" + integrity sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg== + dependencies: + "@babel/helper-validator-identifier" "^7.14.5" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.14.5", "@babel/parser@^7.15.0": + version "7.15.2" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.15.2.tgz#08d4ffcf90d211bf77e7cc7154c6f02d468d2b1d" + integrity sha512-bMJXql1Ss8lFnvr11TZDH4ArtwlAS5NG9qBmdiFW2UHHm6MVoR+GDc5XE2b9K938cyjc9O6/+vjjcffLDtfuDg== + +"@babel/plugin-proposal-object-rest-spread@^7.5.5": + version "7.14.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.14.7.tgz#5920a2b3df7f7901df0205974c0641b13fd9d363" + integrity sha512-082hsZz+sVabfmDWo1Oct1u1AgbKbUAyVgmX4otIc7bdsRgHBXwTwb3DpDmD4Eyyx6DNiuz5UAATT655k+kL5g== + dependencies: + "@babel/compat-data" "^7.14.7" + "@babel/helper-compilation-targets" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.14.5" + +"@babel/plugin-syntax-jsx@^7.14.5": + version "7.14.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.14.5.tgz#000e2e25d8673cce49300517a3eda44c263e4201" + integrity sha512-ohuFIsOMXJnbOMRfX7/w7LocdR6R7whhuRD4ax8IipLcLPlZGJKkBxgHp++U4N/vKyU16/YDQr2f5seajD3jIw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-transform-destructuring@^7.5.0": + version "7.14.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.14.7.tgz#0ad58ed37e23e22084d109f185260835e5557576" + integrity sha512-0mDE99nK+kVh3xlc5vKwB6wnP9ecuSj+zQCa/n0voENtP/zymdT4HH6QEb65wjjcbqr1Jb/7z9Qp7TF5FtwYGw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-parameters@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.14.5.tgz#49662e86a1f3ddccac6363a7dfb1ff0a158afeb3" + integrity sha512-Tl7LWdr6HUxTmzQtzuU14SqbgrSKmaR77M0OKyq4njZLQTPfOvzblNKyNkGwOfEFCEx7KeYHQHDI0P3F02IVkA== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-react-jsx@^7.3.0": + version "7.14.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.14.9.tgz#3314b2163033abac5200a869c4de242cd50a914c" + integrity sha512-30PeETvS+AeD1f58i1OVyoDlVYQhap/K20ZrMjLmmzmC2AYR/G43D4sdJAaDAqCD3MYpSWbmrz3kES158QSLjw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.14.5" + "@babel/helper-module-imports" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-jsx" 
"^7.14.5" + "@babel/types" "^7.14.9" + +"@babel/template@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.14.5.tgz#a9bc9d8b33354ff6e55a9c60d1109200a68974f4" + integrity sha512-6Z3Po85sfxRGachLULUhOmvAaOo7xCvqGQtxINai2mEGPFm6pQ4z5QInFnUrRpfoSV60BnjyF5F3c+15fxFV1g== + dependencies: + "@babel/code-frame" "^7.14.5" + "@babel/parser" "^7.14.5" + "@babel/types" "^7.14.5" + +"@babel/traverse@^7.14.8", "@babel/traverse@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.15.0.tgz#4cca838fd1b2a03283c1f38e141f639d60b3fc98" + integrity sha512-392d8BN0C9eVxVWd8H6x9WfipgVH5IaIoLp23334Sc1vbKKWINnvwRpb4us0xtPaCumlwbTtIYNA0Dv/32sVFw== + dependencies: + "@babel/code-frame" "^7.14.5" + "@babel/generator" "^7.15.0" + "@babel/helper-function-name" "^7.14.5" + "@babel/helper-hoist-variables" "^7.14.5" + "@babel/helper-split-export-declaration" "^7.14.5" + "@babel/parser" "^7.15.0" + "@babel/types" "^7.15.0" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.14.5", "@babel/types@^7.14.8", "@babel/types@^7.14.9", "@babel/types@^7.15.0": + version "7.15.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.15.0.tgz#61af11f2286c4e9c69ca8deb5f4375a73c72dcbd" + integrity sha512-OBvfqnllOIdX4ojTHpwZbpvz4j3EWyjkZEdmjH0/cgsd6QOdSgU8rLSk6ard/pcW7rlmjdVSX/AWOaORR1uNOQ== + dependencies: + "@babel/helper-validator-identifier" "^7.14.9" + to-fast-properties "^2.0.0" + +"@eslint/eslintrc@^0.2.1": + version "0.2.2" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.2.2.tgz#d01fc791e2fc33e88a29d6f3dc7e93d0cd784b76" + integrity sha512-EfB5OHNYp1F4px/LI/FEnGylop7nOqkQ1LRzCM0KccA2U8tvV8w01KBv37LbO7nW4H+YhKyo2LcJhRwjjV17QQ== + dependencies: + ajv "^6.12.4" + debug "^4.1.1" + espree "^7.3.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^3.13.1" + lodash "^4.17.19" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + 
+"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@kwsites/file-exists@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@kwsites/file-exists/-/file-exists-1.1.1.tgz#ad1efcac13e1987d8dbaf235ef3be5b0d96faa99" + integrity sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw== + dependencies: + debug "^4.1.1" + +"@kwsites/promise-deferred@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz#8ace5259254426ccef57f3175bc64ed7095ed919" + integrity sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + 
+"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@oozcitak/dom@1.15.8": + version "1.15.8" + resolved "https://registry.yarnpkg.com/@oozcitak/dom/-/dom-1.15.8.tgz#0c0c7bb54cfdaadc07fd637913e706101721d15d" + integrity sha512-MoOnLBNsF+ok0HjpAvxYxR4piUhRDCEWK0ot3upwOOHYudJd30j6M+LNcE8RKpwfnclAX9T66nXXzkytd29XSw== + dependencies: + "@oozcitak/infra" "1.0.8" + "@oozcitak/url" "1.0.4" + "@oozcitak/util" "8.3.8" + +"@oozcitak/infra@1.0.8": + version "1.0.8" + resolved "https://registry.yarnpkg.com/@oozcitak/infra/-/infra-1.0.8.tgz#b0b089421f7d0f6878687608301fbaba837a7d17" + integrity sha512-JRAUc9VR6IGHOL7OGF+yrvs0LO8SlqGnPAMqyzOuFZPSZSXI7Xf2O9+awQPSMXgIWGtgUf/dA6Hs6X6ySEaWTg== + dependencies: + "@oozcitak/util" "8.3.8" + +"@oozcitak/url@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@oozcitak/url/-/url-1.0.4.tgz#ca8b1c876319cf5a648dfa1123600a6aa5cda6ba" + integrity sha512-kDcD8y+y3FCSOvnBI6HJgl00viO/nGbQoCINmQ0h98OhnGITrWR3bOGfwYCthgcrV8AnTJz8MzslTQbC3SOAmw== + dependencies: + "@oozcitak/infra" "1.0.8" + "@oozcitak/util" "8.3.8" + +"@oozcitak/util@8.3.8": + version "8.3.8" + resolved "https://registry.yarnpkg.com/@oozcitak/util/-/util-8.3.8.tgz#10f65fe1891fd8cde4957360835e78fd1936bfdd" + integrity sha512-T8TbSnGsxo6TDBJx/Sgv/BlVJL3tshxZP7Aq5R1mSnM5OcHY2dQaxLMu2+E8u3gN0MLOzdjurqN4ZRVuzQycOQ== + +"@sindresorhus/is@^0.14.0": + version "0.14.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" + integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved 
"https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@github:sinonjs/fake-timers#0bfffc1": + version "6.0.1" + resolved "https://codeload.github.com/sinonjs/fake-timers/tar.gz/0bfffc1810990f6e5dc42c5238e48cd90bd41265" + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@szmarczak/http-timer@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" + integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== + dependencies: + defer-to-connect "^1.0.1" + +"@types/eslint@^7.2.13": + version "7.28.0" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.28.0.tgz#7e41f2481d301c68e14f483fe10b017753ce8d5a" + integrity sha512-07XlgzX0YJUn4iG1ocY4IX9DzKSmMGUs6ESKlxWhZRaa0fatIWaHWUVapcuGa8r5HFnTqzj+4OCjd5f7EZ/i/A== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*": + version "0.0.50" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.50.tgz#1e0caa9364d3fccd2931c3ed96fdbeaa5d4cca83" + integrity sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw== + +"@types/json-schema@*": + version "7.0.9" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.9.tgz#97edc9037ea0c38585320b28964dde3b39e4660d" + integrity sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ== + +"@types/minimist@^1.2.0": + version "1.2.2" + resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c" + integrity sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ== + +"@types/node@*": 
+ version "16.4.13" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.4.13.tgz#7dfd9c14661edc65cccd43a29eb454174642370d" + integrity sha512-bLL69sKtd25w7p1nvg9pigE4gtKVpGTPojBFLMkGHXuUgap2sLqQt2qUnqmVCDfzGUL0DRNZP+1prIZJbMeAXg== + +"@types/node@^15.3.1": + version "15.14.7" + resolved "https://registry.yarnpkg.com/@types/node/-/node-15.14.7.tgz#29fea9a5b14e2b75c19028e1c7a32edd1e89fe92" + integrity sha512-FA45p37/mLhpebgbPWWCKfOisTjxGK9lwcHlJ6XVLfu3NgfcazOJHdYUZCWPMK8QX4LhNZdmfo6iMz9FqpUbaw== + +"@types/normalize-package-data@^2.4.0": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" + integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== + +"@types/prop-types@*": + version "15.7.4" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.4.tgz#fcf7205c25dff795ee79af1e30da2c9790808f11" + integrity sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ== + +"@types/react@^16.9.23": + version "16.14.12" + resolved "https://registry.yarnpkg.com/@types/react/-/react-16.14.12.tgz#1e38e2114e568f6541f88628a207f72630ee161f" + integrity sha512-7nOJgNsRbARhZhvwPm7cnzahtzEi5VJ9OvcQk8ExEEb1t+zaFklwLVkJz7G1kfxX4X/mDa/icTmzE0vTmqsqBg== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/yoga-layout@1.9.2": + version "1.9.2" + resolved "https://registry.yarnpkg.com/@types/yoga-layout/-/yoga-layout-1.9.2.tgz#efaf9e991a7390dc081a0b679185979a83a9639a" + integrity 
sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw== + +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== + +acorn-jsx@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn@^7.4.0: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + +ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-align@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.0.tgz#b536b371cf687caaef236c18d3e21fe3797467cb" + integrity sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw== + dependencies: + string-width "^3.0.0" + +ansi-colors@^4.1.1: + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" + integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== + +ansi-escapes@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== + +ansi-escapes@^4.2.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= + +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= + +ansi-regex@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" + integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== + +ansi-regex@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" + integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== + +ansi-styles@^3.2.0, ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity 
sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansicolors@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.3.2.tgz#665597de86a9ffe3aa9bfbe6cae5c6ea426b4979" + integrity sha1-ZlWX3oap/+Oqm/vmyuXG6kJrSXk= + +anymatch@~3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +append-transform@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-2.0.0.tgz#99d9d29c7b38391e6f428d28ce136551f0b77e12" + integrity sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg== + dependencies: + default-require-extensions "^3.0.0" + +archy@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" + integrity sha1-+cjBN1fMHde8N5rHeyxipcKGjEA= + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity 
sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +args@5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/args/-/args-5.0.1.tgz#4bf298df90a4799a09521362c579278cc2fdd761" + integrity sha512-1kqmFCFsPffavQFGt8OxJdIcETti99kySRUPMpOhaGjL6mRJn8HFU1OxKY5bMqfZKUwTQc1mZkAjmGYaVOHFtQ== + dependencies: + camelcase "5.0.0" + chalk "2.4.2" + leven "2.1.0" + mri "1.1.4" + +array-find-index@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" + integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= + +array-includes@^3.1.1, array-includes@^3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.3.tgz#c7f619b382ad2afaf5326cddfdc0afc61af7690a" + integrity sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.2" + get-intrinsic "^1.1.1" + is-string "^1.0.5" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.3: + version "1.2.4" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123" + integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + +array.prototype.flatmap@^1.2.3: + version "1.2.4" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.2.4.tgz#94cfd47cc1556ec0747d97f7c7738c58122004c9" + integrity 
sha512-r9Z0zYoxqHz60vvQbWEdXIEtCwHF0yxaWfno9qzXeNHvfyl3BZqygmGzb84dsubyaXLH4husF+NFgMSdpZhk2Q== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + function-bind "^1.1.1" + +arrify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= + +arrify@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" + integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== + +asap@^2.0.0: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= + +asn1@~0.2.3: + version "0.2.4" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" + integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= + +astral-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" + integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== + +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== + +async-hook-domain@^2.0.1: + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/async-hook-domain/-/async-hook-domain-2.0.3.tgz#702d86fc21866bcb0f38b32214d04c2e5a311429" + integrity sha512-MadiLLDEZRZzZwcm0dgS+K99qXZ4H2saAUwUgwzFulbAkXrKi3AX5FvWS3FFTQtLMwrqcGqAJe6o12KrObejQA== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + +auto-bind@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/auto-bind/-/auto-bind-4.0.0.tgz#e3589fc6c2da8f7ca43ba9f84fa52a744fc997fb" + integrity sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ== + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= + +aws4@^1.8.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" + integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +basic-auth-parser@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/basic-auth-parser/-/basic-auth-parser-0.0.2.tgz#ce9e71a77f23c1279eecd2659b2a46244c156e41" + integrity sha1-zp5xp38jwSee7NJlmypGJEwVbkE= + +bcrypt-pbkdf@^1.0.0: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= + dependencies: + tweetnacl "^0.14.3" + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bind-obj-methods@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz#65b66544d9d668d80dfefe2089dd347ad1dbcaed" + integrity sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw== + +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + +boxen@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64" + integrity sha512-eB4uT9RGzg2odpER62bBwSLvUeGC+WbRjjyyFhGsKnc8wp/m0+hQsMUvUe3H2V0D5vw0nBdO1hCJoZo5mKeuIQ== + dependencies: + ansi-align "^3.0.0" + camelcase "^5.3.1" + chalk "^3.0.0" + cli-boxes "^2.2.0" + string-width "^4.1.0" + term-size "^2.1.0" + type-fest "^0.8.1" + widest-line "^3.1.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.1, braces@~3.0.2: + version "3.0.2" + resolved 
"https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browserslist@^4.16.6: + version "4.16.7" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.7.tgz#108b0d1ef33c4af1b587c54f390e7041178e4335" + integrity sha512-7I4qVwqZltJ7j37wObBe3SoTz+nS8APaNcrBOlgoirb6/HbEU2XxW/LpUDTCngM6iauwFqmRTuOMfyKnFGY5JA== + dependencies: + caniuse-lite "^1.0.30001248" + colorette "^1.2.2" + electron-to-chromium "^1.3.793" + escalade "^3.1.1" + node-releases "^1.1.73" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + +cacheable-request@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" + integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== + dependencies: + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^3.0.0" + lowercase-keys "^2.0.0" + normalize-url "^4.1.0" + responselike "^1.0.2" + +caching-transform@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/caching-transform/-/caching-transform-4.0.0.tgz#00d297a4206d71e2163c39eaffa8157ac0651f0f" + integrity 
sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA== + dependencies: + hasha "^5.0.0" + make-dir "^3.0.0" + package-hash "^4.0.0" + write-file-atomic "^3.0.0" + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +caller-callsite@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" + integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= + dependencies: + callsites "^2.0.0" + +caller-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" + integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= + dependencies: + caller-callsite "^2.0.0" + +callsites@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" + integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase-keys@^6.2.2: + version "6.2.2" + resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0" + integrity sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg== + dependencies: + camelcase "^5.3.1" + map-obj "^4.0.0" + quick-lru "^4.0.1" + +camelcase@5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/camelcase/-/camelcase-5.0.0.tgz#03295527d58bd3cd4aa75363f35b2e8d97be2f42" + integrity sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA== + +camelcase@^5.0.0, camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +caniuse-lite@^1.0.30001248: + version "1.0.30001249" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001249.tgz#90a330057f8ff75bfe97a94d047d5e14fabb2ee8" + integrity sha512-vcX4U8lwVXPdqzPWi6cAJ3FnQaqXbBqy/GZseKNQzRj37J7qZdGcBtxq/QLFNLLlfsoXLUdHw8Iwenri86Tagw== + +cardinal@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/cardinal/-/cardinal-2.1.1.tgz#7cc1055d822d212954d07b085dea251cc7bc5505" + integrity sha1-fMEFXYItISlU0HsIXeolHMe8VQU= + dependencies: + ansicolors "~0.3.2" + redeyed "~2.1.0" + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= + +chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^4.0.0, chalk@^4.1.0: + version "4.1.2" + resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chokidar@^3.3.0: + version "3.5.2" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.2.tgz#dba3976fcadb016f66fd365021d91600d01c1e75" + integrity sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +ci-info@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" + integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +cli-boxes@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f" + integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== + +cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= + dependencies: + restore-cursor "^2.0.0" + +cli-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + integrity 
sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== + dependencies: + restore-cursor "^3.1.0" + +cli-spinners@^2.5.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.0.tgz#36c7dc98fb6a9a76bd6238ec3f77e2425627e939" + integrity sha512-t+4/y50K/+4xcCRosKkA7W4gTr1MySvLV0q+PxmG7FJ5g+66ChKurYjxBCjHggHH3HA5Hh9cy+lcUGWDqVH+4Q== + +cli-truncate@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" + integrity sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg== + dependencies: + slice-ansi "^3.0.0" + string-width "^4.2.0" + +cliui@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" + integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== + dependencies: + string-width "^2.1.1" + strip-ansi "^4.0.0" + wrap-ansi "^2.0.0" + +cliui@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" + integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^6.2.0" + +clone-response@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" + integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= + dependencies: + mimic-response "^1.0.0" + +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= + +code-point-at@^1.0.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +color-support@^1.1.0: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" + integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== + +colorette@^1.2.2: + version "1.3.0" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.3.0.tgz#ff45d2f0edb244069d3b772adeb04fed38d0a0af" + integrity sha512-ecORCqbSFP7Wm8Y6lyqMJjexBQqXSF7SSeaTyGGphogUjBlFP9m9o08wy86HL2uB7fMTxtOUzLMk7ogKcxMg1w== + +combined-stream@^1.0.6, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity 
sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +configstore@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96" + integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== + dependencies: + dot-prop "^5.2.0" + graceful-fs "^4.1.2" + make-dir "^3.0.0" + unique-string "^2.0.0" + write-file-atomic "^3.0.0" + xdg-basedir "^4.0.0" + +contains-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" + integrity sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo= + +convert-hrtime@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/convert-hrtime/-/convert-hrtime-5.0.0.tgz#f2131236d4598b95de856926a67100a0a97e9fa3" + integrity sha512-lOETlkIeYSJWcbbcvjRKGxVMXJR+8+OQb/mTPbA4ObPMytYIsUbuOE0Jzy60hjARYszq1id0j8KgVhC+WGZVTg== + +convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +core-util-is@1.0.2, core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity 
sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +coveralls@^3.0.11: + version "3.1.1" + resolved "https://registry.yarnpkg.com/coveralls/-/coveralls-3.1.1.tgz#f5d4431d8b5ae69c5079c8f8ca00d64ac77cf081" + integrity sha512-+dxnG2NHncSD1NrqbSM3dn/lE57O6Qf/koe9+I7c+wzkqRmEvcp0kgJdxKInzYzkICKkFMZsX3Vct3++tsF9ww== + dependencies: + js-yaml "^3.13.1" + lcov-parse "^1.0.0" + log-driver "^1.2.7" + minimist "^1.2.5" + request "^2.88.2" + +cross-spawn@^7.0.0, cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +cross-zip@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/cross-zip/-/cross-zip-4.0.0.tgz#c29bfb2c001659a6d480ae9596f3bee83b48a230" + integrity sha512-MEzGfZo0rqE10O/B+AEcCSJLZsrWuRUvmqJTqHNqBtALhaJc3E3ixLGLJNTRzEA2K34wbmOHC4fwYs9sVsdcCA== + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +csstype@^3.0.2: + version "3.0.8" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.8.tgz#d2266a792729fb227cd216fb572f43728e1ad340" + integrity sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw== + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= + dependencies: + assert-plus "^1.0.0" + +debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity 
sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^3.1.0, debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" + integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== + dependencies: + ms "2.1.2" + +debuglog@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" + integrity sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI= + +decamelize-keys@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/decamelize-keys/-/decamelize-keys-1.1.0.tgz#d171a87933252807eb3cb61dc1c1445d078df2d9" + integrity sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk= + dependencies: + decamelize "^1.1.0" + map-obj "^1.0.0" + +decamelize@^1.1.0, decamelize@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + +decompress-response@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" + integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= + dependencies: + mimic-response "^1.0.0" + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= + +deep-extend@^0.6.0: + version "0.6.0" + resolved 
"https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +deep-is@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-require-extensions@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.0.tgz#e03f93aac9b2b6443fc52e5e4a37b3ad9ad8df96" + integrity sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg== + dependencies: + strip-bom "^4.0.0" + +defaults@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" + integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= + dependencies: + clone "^1.0.2" + +defer-to-connect@^1.0.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" + integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== + +define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +dezalgo@^1.0.0, dezalgo@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/dezalgo/-/dezalgo-1.0.3.tgz#7f742de066fc748bc8db820569dddce49bf0d456" + integrity sha1-f3Qt4Gb8dIvI24IFad3c5Jvw1FY= + dependencies: + asap "^2.0.0" + wrappy "1" + +diff@^4.0.1, diff@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa" + integrity sha1-N53Ocw9hZvds76TmcHoVmwLFpvo= + dependencies: + esutils "^2.0.2" + isarray "^1.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dot-prop@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" + integrity 
sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== + dependencies: + is-obj "^2.0.0" + +duplexer3@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" + integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= + +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +electron-to-chromium@^1.3.793: + version "1.3.801" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.801.tgz#f41c588e408ad1a4f794f91f38aa94a89c492f51" + integrity sha512-xapG8ekC+IAHtJrGBMQSImNuN+dm+zl7UP1YbhvTkwQn8zf/yYuoxfTSAEiJ9VDD+kjvXaAhNDPSxJ+VImtAJA== + +emoji-regex@^7.0.1: + version "7.0.3" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" + integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +end-of-stream@^1.1.0: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + 
dependencies: + ansi-colors "^4.1.1" + +error-ex@^1.2.0, error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.18.0-next.1, es-abstract@^1.18.0-next.2, es-abstract@^1.18.2: + version "1.18.5" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.5.tgz#9b10de7d4c206a3581fd5b2124233e04db49ae19" + integrity sha512-DDggyJLoS91CkJjgauM5c0yZMjiD1uK3KcaCeAmffGwZ+ODWzOkPN4QwRbsK5DOFf06fywmyLci3ZD8jLGhVYA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + get-intrinsic "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.2" + internal-slot "^1.0.3" + is-callable "^1.2.3" + is-negative-zero "^2.0.1" + is-regex "^1.1.3" + is-string "^1.0.6" + object-inspect "^1.11.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + string.prototype.trimend "^1.0.4" + string.prototype.trimstart "^1.0.4" + unbox-primitive "^1.0.1" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +es6-error@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/es6-error/-/es6-error-4.1.1.tgz#9e3af407459deed47e9a91f9b885a84eb05c561d" + integrity sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg== + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity 
sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-goat@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675" + integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +eslint-config-standard-jsx@10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-standard-jsx/-/eslint-config-standard-jsx-10.0.0.tgz#dc24992661325a2e480e2c3091d669f19034e18d" + integrity sha512-hLeA2f5e06W1xyr/93/QJulN/rLbUVUmqTlexv9PRKHFwEC9ffJcH2LvJhMoEqYQBEYafedgGZXH2W8NUpt5lA== + +eslint-config-standard@16.0.2: + version "16.0.2" + resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-16.0.2.tgz#71e91727ac7a203782d0a5ca4d1c462d14e234f6" + integrity sha512-fx3f1rJDsl9bY7qzyX8SAtP8GBSk6MfXFaTfaGgk12aAYW4gJSyRm7dM790L6cbXv63fvjY4XeSzXnb4WM+SKw== + +eslint-formatter-pretty@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/eslint-formatter-pretty/-/eslint-formatter-pretty-4.1.0.tgz#7a6877c14ffe2672066c853587d89603e97c7708" + integrity sha512-IsUTtGxF1hrH6lMWiSl1WbGaiP01eT6kzywdY1U+zLc0MP+nwEnUiS9UI8IaOTUhTeQJLlCEWIbXINBH4YJbBQ== + dependencies: + "@types/eslint" "^7.2.13" + ansi-escapes "^4.2.1" + chalk "^4.1.0" + eslint-rule-docs "^1.1.5" + log-symbols "^4.0.0" + plur "^4.0.0" + string-width "^4.2.0" + 
supports-hyperlinks "^2.0.0" + +eslint-import-resolver-node@^0.3.4: + version "0.3.5" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.5.tgz#939bbb0f74e179e757ca87f7a4a890dabed18ac4" + integrity sha512-XMoPKjSpXbkeJ7ZZ9icLnJMTY5Mc1kZbCakHquaFsXPpyWOwK0TK6CODO+0ca54UoM9LKOxyUNnoVZRl8TeaAg== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.6.0: + version "2.6.2" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.2.tgz#94e5540dd15fe1522e8ffa3ec8db3b7fa7e7a534" + integrity sha512-QG8pcgThYOuqxupd06oYTZoNOGaUdTY1PqK+oS6ElF6vs4pBdk/aYxFVQQXzcrAqp9m7cl7lb2ubazX+g16k2Q== + dependencies: + debug "^3.2.7" + pkg-dir "^2.0.0" + +eslint-plugin-es@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz#75a7cdfdccddc0589934aeeb384175f221c57893" + integrity sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ== + dependencies: + eslint-utils "^2.0.0" + regexpp "^3.0.0" + +eslint-plugin-import@~2.22.1: + version "2.22.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.1.tgz#0896c7e6a0cf44109a2d97b95903c2bb689d7702" + integrity sha512-8K7JjINHOpH64ozkAhpT3sd+FswIZTfMZTjdx052pnWrgRCVfp8op9tbjpAk3DdUeI/Ba4C8OjdC0r90erHEOw== + dependencies: + array-includes "^3.1.1" + array.prototype.flat "^1.2.3" + contains-path "^0.1.0" + debug "^2.6.9" + doctrine "1.5.0" + eslint-import-resolver-node "^0.3.4" + eslint-module-utils "^2.6.0" + has "^1.0.3" + minimatch "^3.0.4" + object.values "^1.1.1" + read-pkg-up "^2.0.0" + resolve "^1.17.0" + tsconfig-paths "^3.9.0" + +eslint-plugin-node@~11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz#c95544416ee4ada26740a30474eefc5402dc671d" + integrity 
sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g== + dependencies: + eslint-plugin-es "^3.0.0" + eslint-utils "^2.0.0" + ignore "^5.1.1" + minimatch "^3.0.4" + resolve "^1.10.1" + semver "^6.1.0" + +eslint-plugin-promise@~4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz#845fd8b2260ad8f82564c1222fce44ad71d9418a" + integrity sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw== + +eslint-plugin-react@~7.21.5: + version "7.21.5" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.21.5.tgz#50b21a412b9574bfe05b21db176e8b7b3b15bff3" + integrity sha512-8MaEggC2et0wSF6bUeywF7qQ46ER81irOdWS4QWxnnlAEsnzeBevk1sWh7fhpCghPpXb+8Ks7hvaft6L/xsR6g== + dependencies: + array-includes "^3.1.1" + array.prototype.flatmap "^1.2.3" + doctrine "^2.1.0" + has "^1.0.3" + jsx-ast-utils "^2.4.1 || ^3.0.0" + object.entries "^1.1.2" + object.fromentries "^2.0.2" + object.values "^1.1.1" + prop-types "^15.7.2" + resolve "^1.18.1" + string.prototype.matchall "^4.0.2" + +eslint-rule-docs@^1.1.5: + version "1.1.231" + resolved "https://registry.yarnpkg.com/eslint-rule-docs/-/eslint-rule-docs-1.1.231.tgz#648b978bc5a1bb740be5f28d07470f0926b9cdf1" + integrity sha512-egHz9A1WG7b8CS0x1P6P/Rj5FqZOjray/VjpJa14tMZalfRKvpE2ONJ3plCM7+PcinmU4tcmbPLv0VtwzSdLVA== + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-utils@^2.0.0, eslint-utils@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity 
sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint@~7.13.0: + version "7.13.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.13.0.tgz#7f180126c0dcdef327bfb54b211d7802decc08da" + integrity sha512-uCORMuOO8tUzJmsdRtrvcGq5qposf7Rw0LwkTJkoDbOycVQtQjmnhZSuLQnozLE4TmAzlMVV45eCHmQ1OpDKUQ== + dependencies: + "@babel/code-frame" "^7.0.0" + "@eslint/eslintrc" "^0.2.1" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.0" + esquery "^1.2.0" + esutils "^2.0.2" + file-entry-cache "^5.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^5.0.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash "^4.17.19" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^5.2.3" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^7.3.0: + version "7.3.1" + resolved 
"https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" + integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^1.3.0" + +esprima@^4.0.0, esprima@~4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" + integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +events-to-array@^1.0.1: + version 
"1.1.2" + resolved "https://registry.yarnpkg.com/events-to-array/-/events-to-array-1.1.2.tgz#2d41f563e1fe400ed4962fe1a4d5c6a7539df7f6" + integrity sha1-LUH1Y+H+QA7Uli/hpNXGp1Od9/Y= + +extend@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= + +extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.1.1: + version "3.2.7" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1" + integrity sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved 
"https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +fastq@^1.6.0: + version "1.11.1" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.11.1.tgz#5d8175aae17db61947f8b162cfc7f63264d22807" + integrity sha512-HOnr8Mc60eNYl1gzwp6r5RoUyAn5/glBolUzP/Ez6IFVPMPirxn/9phgL6zhOtaTy7ISwPvQ+wT+hfcRZh/bzw== + dependencies: + reusify "^1.0.4" + +file-entry-cache@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c" + integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g== + dependencies: + flat-cache "^2.0.1" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-cache-dir@^3.2.0: + version "3.3.1" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880" + integrity sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^2.0.0, find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + dependencies: + locate-path "^2.0.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, 
find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +findit@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/findit/-/findit-2.0.0.tgz#6509f0126af4c178551cfa99394e032e13a4d56e" + integrity sha1-ZQnwEmr0wXhVHPqZOU4DLhOk1W4= + +flat-cache@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" + integrity sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA== + dependencies: + flatted "^2.0.0" + rimraf "2.6.3" + write "1.0.3" + +flatted@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138" + integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA== + +foreground-child@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53" + integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^3.0.2" + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" 
+ +from2@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" + integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= + dependencies: + inherits "^2.0.1" + readable-stream "^2.0.0" + +fromentries@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/fromentries/-/fromentries-1.3.2.tgz#e4bca6808816bf8f93b52750f1127f5a6fd86e3a" + integrity sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg== + +fs-exists-cached@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz#cf25554ca050dc49ae6656b41de42258989dcbce" + integrity sha1-zyVVTKBQ3EmuZla0HeQiWJidy84= + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function-loop@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/function-loop/-/function-loop-2.0.1.tgz#799c56ced01698cf12a1b80e4802e9dafc2ebada" + integrity sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ== + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity 
sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" + integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stdin@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" + integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== + +get-stream@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" + integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + dependencies: + pump "^3.0.0" + +get-stream@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" + integrity 
sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + dependencies: + pump "^3.0.0" + +getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= + dependencies: + assert-plus "^1.0.0" + +glob-parent@^5.0.0, glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob@^7.0.5, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: + version "7.1.7" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" + integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-dirs@^2.0.1: + version "2.1.0" + resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-2.1.0.tgz#e9046a49c806ff04d6c1825e196c8f0091e8df4d" + integrity sha512-MG6kdOUh/xBnyo9cJFeIKkLEc1AyFq42QTU4XiX51i2NEdxLxLWXIjEjmqKeSuKR7pAZjTqUVoT2b2huxVLgYQ== + dependencies: + ini "1.3.7" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^12.1.0: + version "12.4.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" + integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== + dependencies: + type-fest "^0.8.1" + 
+globby@^11.0.1: + version "11.0.4" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" + integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.1.1" + ignore "^5.1.4" + merge2 "^1.3.0" + slash "^3.0.0" + +got@^9.6.0: + version "9.6.0" + resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" + integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== + dependencies: + "@sindresorhus/is" "^0.14.0" + "@szmarczak/http-timer" "^1.1.2" + cacheable-request "^6.0.0" + decompress-response "^3.3.0" + duplexer3 "^0.1.4" + get-stream "^4.1.0" + lowercase-keys "^1.0.1" + mimic-response "^1.0.1" + p-cancelable "^1.0.0" + to-readable-stream "^1.0.0" + url-parse-lax "^3.0.0" + +graceful-fs@^4.1.15, graceful-fs@^4.1.2: + version "4.2.8" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a" + integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== + +har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= + +har-validator@~5.1.3: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + +hard-rejection@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" + integrity 
sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== + +has-bigints@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" + integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-symbols@^1.0.1, has-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" + integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has-yarn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" + integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hasha@^5.0.0: + version 
"5.2.2" + resolved "https://registry.yarnpkg.com/hasha/-/hasha-5.2.2.tgz#a48477989b3b327aea3c04f53096d816d97522a1" + integrity sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ== + dependencies: + is-stream "^2.0.0" + type-fest "^0.8.0" + +hosted-git-info@^2.1.4: + version "2.8.9" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" + integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== + +hpagent@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/hpagent/-/hpagent-0.1.2.tgz#cab39c66d4df2d4377dbd212295d878deb9bdaa9" + integrity sha512-ePqFXHtSQWAFXYmj+JtOTHr84iNrII4/QRlAAPPE+zqnKy4xJo7Ie1Y4kC7AdB+LxLxSTTzBMASsEcy0q8YyvQ== + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +http-cache-semantics@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" + integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ== + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +ieee754@^1.1.13: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + +ignore@^4.0.6: + version "4.0.6" + resolved 
"https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +ignore@^5.1.1, ignore@^5.1.4: + version "5.1.8" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" + integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-jsx@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/import-jsx/-/import-jsx-4.0.0.tgz#2f31fd8e884e14f136751448841ffd2d3144dce1" + integrity sha512-CnjJ2BZFJzbFDmYG5S47xPQjMlSbZLyLJuG4znzL4TdPtJBxHtFP1xVmR+EYX4synFSldiY3B6m00XkPM3zVnA== + dependencies: + "@babel/core" "^7.5.5" + "@babel/plugin-proposal-object-rest-spread" "^7.5.5" + "@babel/plugin-transform-destructuring" "^7.5.0" + "@babel/plugin-transform-react-jsx" "^7.3.0" + caller-path "^2.0.0" + find-cache-dir "^3.2.0" + make-dir "^3.0.2" + resolve-from "^3.0.0" + rimraf "^3.0.0" + +import-lazy@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43" + integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM= + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity 
sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ini@1.3.7: + version "1.3.7" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.7.tgz#a09363e1911972ea16d7a8851005d84cf09a9a84" + integrity sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ== + +ini@~1.3.0: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +ink@^2.6.0, ink@^2.7.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/ink/-/ink-2.7.1.tgz#ff1c75b4b022924e2993af62297fa0e48e85618b" + integrity sha512-s7lJuQDJEdjqtaIWhp3KYHl6WV3J04U9zoQ6wVc+Xoa06XM27SXUY57qC5DO46xkF0CfgXMKkKNcgvSu/SAEpA== + dependencies: + ansi-escapes "^4.2.1" + arrify "^2.0.1" + auto-bind "^4.0.0" + chalk "^3.0.0" + cli-cursor "^3.1.0" + cli-truncate "^2.1.0" + is-ci "^2.0.0" + lodash.throttle "^4.1.1" + log-update "^3.0.0" + prop-types "^15.6.2" + react-reconciler "^0.24.0" + scheduler "^0.18.0" + signal-exit "^3.0.2" + slice-ansi "^3.0.0" + string-length "^3.1.0" + widest-line "^3.1.0" + wrap-ansi "^6.2.0" + yoga-layout-prebuilt "^1.9.3" + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + 
integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +into-stream@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-6.0.0.tgz#4bfc1244c0128224e18b8870e85b2de8e66c6702" + integrity sha512-XHbaOAvP+uFKUFsOgoNPRjLkwB+I22JFPFe5OjTkQ0nwgj6+pSjb4NmB6VMxaPshLiOf+zcpOCBQuLwC1KHhZA== + dependencies: + from2 "^2.3.0" + p-is-promise "^3.0.0" + +irregular-plurals@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/irregular-plurals/-/irregular-plurals-3.3.0.tgz#67d0715d4361a60d9fd9ee80af3881c631a31ee2" + integrity sha512-MVBLKUTangM3EfRPFROhmWQQKRDsrgI83J8GS3jXy+OwYqiR2/aoWndYQ5416jLE3uaGgLH7ncme3X9y09gZ3g== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + +is-bigint@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.3.tgz#fc9d9e364210480675653ddaea0518528d49a581" + integrity sha512-ZU538ajmYJmzysE5yU4Y7uIrPQ2j704u+hXFiIPQExpqzzUbpe5jCPdTfmz7jXRxZdvjY3KZ3ZNenoXQovX+Dg== + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.3: + version "1.2.4" + 
resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" + integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== + +is-ci@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" + integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + dependencies: + ci-info "^2.0.0" + +is-core-module@^2.2.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.5.0.tgz#f754843617c70bfd29b7bd87327400cda5c18491" + integrity sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + dependencies: + is-extglob "^2.1.1" + +is-installed-globally@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.3.2.tgz#fd3efa79ee670d1187233182d5b0a1dd00313141" + integrity sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g== + dependencies: + global-dirs "^2.0.1" + is-path-inside "^3.0.1" + +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + +is-negative-zero@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" + integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== + +is-npm@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-4.0.0.tgz#c90dd8380696df87a7a6d823c20d0b12bbe3c84d" + integrity sha512-96ECIfh9xtDDlPylNPXhzjsykHsMJZ18ASpaWzQyBr4YRTcVjUvzaHayDAES2oU/3KpljhHUjtSRNiDwi0F0ig== + +is-number-object@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.6.tgz#6a7aaf838c7f0686a50b4553f7e54a96494e89f0" + integrity 
sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" + integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== + +is-path-inside@^3.0.1: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== + +is-plain-obj@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" + integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= + +is-regex@^1.1.3: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity 
sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0, is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + +is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +is-yarn-global@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232" + integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw== + +isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +isstream@~0.1.2: + version "0.1.2" + resolved 
"https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.0.0-alpha.1: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec" + integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== + +istanbul-lib-hook@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz#8f84c9434888cc6b1d0a9d7092a76d239ebf0cc6" + integrity sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ== + dependencies: + append-transform "^2.0.0" + +istanbul-lib-instrument@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" + integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== + dependencies: + "@babel/core" "^7.7.5" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.0.0" + semver "^6.3.0" + +istanbul-lib-processinfo@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz#e1426514662244b2f25df728e8fd1ba35fe53b9c" + integrity sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw== + dependencies: + archy "^1.0.0" + cross-spawn "^7.0.0" + istanbul-lib-coverage "^3.0.0-alpha.1" + make-dir "^3.0.0" + p-map "^3.0.0" + rimraf "^3.0.0" + uuid "^3.3.3" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity 
sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9" + integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b" + integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jackspeak@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-1.4.0.tgz#4eb2c7935c5e6d28179b50829711d1372a1c9a2a" + integrity sha512-VDcSunT+wcccoG46FtzuBAyQKlzhHjli4q31e1fIHGOsRspqNUFjVzGb+7eIFDlTvqLygxapDHPHS0ouT2o/tw== + dependencies: + cliui "^4.1.0" + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@3.14.0: + version "3.14.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482" + integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + 
integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-buffer@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" + integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= + +json-parse-better-errors@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" + integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity 
sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= + +json5@^2.1.2, json5@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3" + integrity sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA== + dependencies: + minimist "^1.2.5" + +jsprim@^1.2.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.2.3" + verror "1.10.0" + +"jsx-ast-utils@^2.4.1 || ^3.0.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.2.0.tgz#41108d2cec408c3453c1bbe8a4aae9e1e2bd8f82" + integrity sha512-EIsmt3O3ljsU6sot/J4E1zDRxfBNrhjyf/OKjlydwgEimQuznlM4Wv7U+ueONJMyEn1WRE0K8dhi3dVAXYT24Q== + dependencies: + array-includes "^3.1.2" + object.assign "^4.1.2" + +keyv@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" + integrity 
sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== + dependencies: + json-buffer "3.0.0" + +kind-of@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +latest-version@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face" + integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== + dependencies: + package-json "^6.3.0" + +lcov-parse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lcov-parse/-/lcov-parse-1.0.0.tgz#eb0d46b54111ebc561acb4c408ef9363bdc8f7e0" + integrity sha1-6w1GtUER68VhrLTECO+TY73I9+A= + +leven@2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-2.1.0.tgz#c2e7a9f772094dee9d34202ae8acce4687875580" + integrity sha1-wuep93IJTe6dNCAq6KzORoeHVYA= + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +libtap@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/libtap/-/libtap-1.1.1.tgz#927057f505b3641f66043d89f04bd509185ccc2b" + integrity sha512-Fye8fh1+G7E8qqmjQaY+pXGxy7HM0S6bqCCJFLa16+g2jODBByxbJFDpjbDNF69wfRVyvJ+foLZc1WTIv7dx+g== + dependencies: + async-hook-domain "^2.0.1" + bind-obj-methods "^3.0.0" + diff "^4.0.2" + function-loop "^2.0.1" + minipass "^3.1.1" + own-or "^1.0.0" + own-or-env "^1.0.1" + signal-exit "^3.0.2" + stack-utils "^2.0.1" + tap-parser "^10.0.1" + tap-yaml "^1.0.0" + tcompare "^5.0.1" + trivial-deferred "^1.0.1" + yapool "^1.0.0" + 
+license-checker@^25.0.1: + version "25.0.1" + resolved "https://registry.yarnpkg.com/license-checker/-/license-checker-25.0.1.tgz#4d14504478a5240a857bb3c21cd0491a00d761fa" + integrity sha512-mET5AIwl7MR2IAKYYoVBBpV0OnkKQ1xGj2IMMeEFIs42QAkEVjRtFZGWmQ28WeU7MP779iAgOaOy93Mn44mn6g== + dependencies: + chalk "^2.4.1" + debug "^3.1.0" + mkdirp "^0.5.1" + nopt "^4.0.1" + read-installed "~4.0.3" + semver "^5.5.0" + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + spdx-satisfies "^4.0.0" + treeify "^1.1.0" + +lines-and-columns@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" + integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= + +load-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + integrity sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg= + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + strip-bom "^3.0.0" + +load-json-file@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-5.3.0.tgz#4d3c1e01fa1c03ea78a60ac7af932c9ce53403f3" + integrity sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw== + dependencies: + graceful-fs "^4.1.15" + parse-json "^4.0.0" + pify "^4.0.1" + strip-bom "^3.0.0" + type-fest "^0.3.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + 
p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash.flattendeep@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2" + integrity sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI= + +lodash.throttle@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.throttle/-/lodash.throttle-4.1.1.tgz#c23e91b710242ac70c37f1e1cda9274cc39bf2f4" + integrity sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ= + +lodash@^4.17.14, lodash@^4.17.19: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +log-driver@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/log-driver/-/log-driver-1.2.7.tgz#63b95021f0702fedfa2c9bb0a24e7797d71871d8" + integrity sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg== + +log-symbols@^4.0.0, log-symbols@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + dependencies: + chalk "^4.1.0" + is-unicode-supported "^0.1.0" + +log-update@^3.0.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-3.4.0.tgz#3b9a71e00ac5b1185cc193a36d654581c48f97b9" + integrity sha512-ILKe88NeMt4gmDvk/eb615U/IVn7K9KWGkoYbdatQ69Z65nj1ZzjM6fHXfcs0Uge+e+EGnMW7DY4T9yko8vWFg== + dependencies: + ansi-escapes "^3.2.0" + 
cli-cursor "^2.1.0" + wrap-ansi "^5.0.0" + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" + integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== + +lowercase-keys@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" + integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +make-dir@^3.0.0, make-dir@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +map-obj@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" + integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= + +map-obj@^4.0.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.2.1.tgz#e4ea399dbc979ae735c83c863dd31bdf364277b7" + integrity 
sha512-+WA2/1sPmDj1dlvvJmB5G6JKfY9dpn7EVBUL06+y6PoljPkh+6V1QihwxNkbcGxCRjt2b0F9K0taiCuo7MbdFQ== + +meow@^7.0.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/meow/-/meow-7.1.1.tgz#7c01595e3d337fcb0ec4e8eed1666ea95903d306" + integrity sha512-GWHvA5QOcS412WCo8vwKDlTelGLsCGBVevQB5Kva961rmNfun0PCbv5+xta2kUMFJyR8/oWnn7ddeKdosbAPbA== + dependencies: + "@types/minimist" "^1.2.0" + camelcase-keys "^6.2.2" + decamelize-keys "^1.1.0" + hard-rejection "^2.1.0" + minimist-options "4.1.0" + normalize-package-data "^2.5.0" + read-pkg-up "^7.0.1" + redent "^3.0.0" + trim-newlines "^3.0.0" + type-fest "^0.13.1" + yargs-parser "^18.1.3" + +merge2@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" + integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== + dependencies: + braces "^3.0.1" + picomatch "^2.2.3" + +mime-db@1.49.0: + version "1.49.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.49.0.tgz#f3dfde60c99e9cf3bc9701d687778f537001cbed" + integrity sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA== + +mime-types@^2.1.12, mime-types@~2.1.19: + version "2.1.32" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.32.tgz#1d00e89e7de7fe02008db61001d9e02852670fd5" + integrity sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A== + dependencies: + mime-db "1.49.0" + +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity 
sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +mimic-response@^1.0.0, mimic-response@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" + integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== + +min-indent@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist-options@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" + integrity sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A== + dependencies: + arrify "^1.0.1" + is-plain-obj "^1.1.0" + kind-of "^6.0.3" + +minimist@^1.2.0, minimist@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + +minipass@^3.0.0, minipass@^3.1.1: + version "3.1.3" + resolved 
"https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd" + integrity sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg== + dependencies: + yallist "^4.0.0" + +mkdirp@^0.5.1: + version "0.5.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" + integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== + dependencies: + minimist "^1.2.5" + +mkdirp@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + +mri@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/mri/-/mri-1.1.4.tgz#7cb1dd1b9b40905f1fac053abe25b6720f44744a" + integrity sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@^2.1.1, ms@^2.1.2, ms@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +node-fetch@^2.6.1: + version "2.6.1" + resolved 
"https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" + integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== + +node-preload@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/node-preload/-/node-preload-0.2.1.tgz#c03043bb327f417a18fee7ab7ee57b408a144301" + integrity sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ== + dependencies: + process-on-spawn "^1.0.0" + +node-releases@^1.1.73: + version "1.1.74" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.74.tgz#e5866488080ebaa70a93b91144ccde06f3c3463e" + integrity sha512-caJBVempXZPepZoZAPCWRTNxYQ+xtG/KAi4ozTA5A+nJ7IU+kLQCbqaUjb5Rwy14M9upBWiQ4NutcmW04LJSRw== + +nopt@^4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48" + integrity sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg== + dependencies: + abbrev "1" + osenv "^0.1.4" + +normalize-package-data@^2.0.0, normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== + dependencies: + hosted-git-info "^2.1.4" + resolve "^1.10.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-url@^4.1.0: + version "4.5.1" + resolved 
"https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" + integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== + +npm-normalize-package-bin@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2" + integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA== + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= + +nyc@^15.1.0: + version "15.1.0" + resolved "https://registry.yarnpkg.com/nyc/-/nyc-15.1.0.tgz#1335dae12ddc87b6e249d5a1994ca4bdaea75f02" + integrity sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A== + dependencies: + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + caching-transform "^4.0.0" + convert-source-map "^1.7.0" + decamelize "^1.2.0" + find-cache-dir "^3.2.0" + find-up "^4.1.0" + foreground-child "^2.0.0" + get-package-type "^0.1.0" + glob "^7.1.6" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-hook "^3.0.0" + istanbul-lib-instrument "^4.0.0" + istanbul-lib-processinfo "^2.0.2" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.0.2" + make-dir "^3.0.0" + node-preload "^0.2.1" + p-map "^3.0.0" + process-on-spawn "^1.0.0" + resolve-from "^5.0.0" + rimraf "^3.0.0" + signal-exit "^3.0.2" + spawn-wrap "^2.0.0" + test-exclude "^6.0.0" + yargs "^15.0.2" + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + 
+object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-inspect@^1.11.0, object-inspect@^1.9.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.11.0.tgz#9dceb146cedd4148a0d9e51ab88d34cf509922b1" + integrity sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg== + +object-keys@^1.0.12, object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.entries@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.4.tgz#43ccf9a50bc5fd5b649d45ab1a579f24e088cafd" + integrity sha512-h4LWKWE+wKQGhtMjZEBud7uLGhqyLwj8fpHOarZhD2uY3C9cRtk57VQ89ke3moByLXMedqs3XCHzyb4AmA2DjA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.18.2" + +object.fromentries@^2.0.2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.4.tgz#26e1ba5c4571c5c6f0890cef4473066456a120b8" + integrity sha512-EsFBshs5RUUpQEY1D4q/m59kMfz4YJvxuNCJcv/jWwOJr34EaVnG11ZrZa0UHB3wnzV1wx8m58T4hQL8IuNXlQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.2" + has "^1.0.3" + +object.values@^1.1.1: + version "1.1.4" 
+ resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.4.tgz#0d273762833e816b693a637d30073e7051535b30" + integrity sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.18.2" + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= + dependencies: + mimic-fn "^1.0.0" + +onetime@^5.1.0: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +opener@^1.5.1: + version "1.5.2" + resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" + integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A== + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +ora@^5.4.0: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + 
dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + +os-homedir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= + +os-tmpdir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= + +osenv@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" + integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.0" + +own-or-env@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/own-or-env/-/own-or-env-1.0.1.tgz#54ce601d3bf78236c5c65633aa1c8ec03f8007e4" + integrity sha512-y8qULRbRAlL6x2+M0vIe7jJbJx/kmUTzYonRAa2ayesR2qWLswninkVyeJe4x3IEXhdgoNodzjQRKAoEs6Fmrw== + dependencies: + own-or "^1.0.0" + +own-or@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/own-or/-/own-or-1.0.0.tgz#4e877fbeda9a2ec8000fbc0bcae39645ee8bf8dc" + integrity sha1-Tod/vtqaLsgAD7wLyuOWRe6L+Nw= + +p-cancelable@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" + integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== + +p-is-promise@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-3.0.0.tgz#58e78c7dfe2e163cf2a04ff869e7c1dba64a5971" + integrity sha512-Wo8VsW4IRQSKVXsJCn7TomUaVtyfjVDn3nUP7kE967BQk0CwFpdbZs0X0uk5sW9mkBa9eNM7hCMaG93WUAwxYQ== + +p-limit@^1.1.0: + version "1.3.0" + 
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + dependencies: + p-limit "^1.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-map@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-3.0.0.tgz#d704d9af8a2ba684e2600d9a215983d4141a979d" + integrity sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ== + dependencies: + aggregate-error "^3.0.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + 
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +package-hash@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-4.0.0.tgz#3537f654665ec3cc38827387fc904c163c54f506" + integrity sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ== + dependencies: + graceful-fs "^4.1.15" + hasha "^5.0.0" + lodash.flattendeep "^4.4.0" + release-zalgo "^1.0.0" + +package-json@^6.3.0: + version "6.5.0" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0" + integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== + dependencies: + got "^9.6.0" + registry-auth-token "^4.0.0" + registry-url "^5.0.0" + semver "^6.2.0" + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= + dependencies: + error-ex "^1.2.0" + +parse-json@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" + integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= + dependencies: + error-ex "^1.3.1" + json-parse-better-errors "^1.0.1" + +parse-json@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" 
"^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + integrity sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM= + dependencies: + pify "^2.0.0" + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity 
sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972" + integrity sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw== + +pify@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= + +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + +pkg-conf@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-3.1.0.tgz#d9f9c75ea1bae0e77938cde045b276dac7cc69ae" + integrity sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ== + dependencies: + find-up "^3.0.0" + load-json-file "^5.2.0" + +pkg-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" + integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= + dependencies: + find-up "^2.1.0" + +pkg-dir@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +plur@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/plur/-/plur-4.0.0.tgz#729aedb08f452645fe8c58ef115bf16b0a73ef84" + integrity sha512-4UGewrYgqDFw9vV6zNV+ADmPAUAfJPKtGvb/VdpQAx25X5f3xXdGdyOEVFwkl8Hl/tl7+xbeHqSEM+D5/TirUg== + dependencies: + irregular-plurals "^3.2.0" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prepend-http@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" + integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= + +pretty-hrtime@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1" + integrity sha1-t+PqQkNaTJsnWdmeDyAesZWALuE= + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +process-on-spawn@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/process-on-spawn/-/process-on-spawn-1.0.0.tgz#95b05a23073d30a17acfdc92a440efd2baefdc93" + integrity sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg== + dependencies: + fromentries "^1.2.0" + +progress@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +prop-types@^15.6.2, prop-types@^15.7.2: + version "15.7.2" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" + integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.8.1" + +proxy@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/proxy/-/proxy-1.0.2.tgz#e0cfbe11c0a7a8b238fd2d7134de4e2867578e7f" + integrity sha512-KNac2ueWRpjbUh77OAFPZuNdfEqNynm9DD4xHT14CccGpW8wKZwEkN0yjlb7X9G9Z9F55N0Q+1z+WfgAhwYdzQ== + dependencies: + args "5.0.1" + basic-auth-parser "0.0.2" + debug "^4.1.1" + +psl@^1.1.28: + version "1.8.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" + integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +punycode@^2.0.0, punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +pupa@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62" + integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A== + dependencies: + escape-goat "^2.0.0" + +qs@~6.5.2: + version "6.5.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" + integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== + +rc@^1.2.8: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +react-is@^16.8.1: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-reconciler@^0.24.0: + version "0.24.0" + resolved "https://registry.yarnpkg.com/react-reconciler/-/react-reconciler-0.24.0.tgz#5a396b2c2f5efe8554134a5935f49f546723f2dd" + integrity sha512-gAGnwWkf+NOTig9oOowqid9O0HjTDC+XVGBCAmJYYJ2A2cN/O4gDdIuuUQjv8A4v6GDwVfJkagpBBLW5OW9HSw== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + prop-types "^15.6.2" + scheduler "^0.18.0" + +react@^16.12.0: + version "16.14.0" + resolved "https://registry.yarnpkg.com/react/-/react-16.14.0.tgz#94d776ddd0aaa37da3eda8fc5b6b18a4c9a3114d" + integrity sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + prop-types "^15.6.2" + +read-installed@~4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/read-installed/-/read-installed-4.0.3.tgz#ff9b8b67f187d1e4c29b9feb31f6b223acd19067" + integrity sha1-/5uLZ/GH0eTCm5/rMfayI6zRkGc= + dependencies: + debuglog "^1.0.1" + read-package-json "^2.0.0" + readdir-scoped-modules "^1.0.0" + semver "2 || 3 || 4 || 5" + slide "~1.1.3" + util-extend "^1.0.1" + optionalDependencies: + graceful-fs "^4.1.2" + 
+read-package-json@^2.0.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/read-package-json/-/read-package-json-2.1.2.tgz#6992b2b66c7177259feb8eaac73c3acd28b9222a" + integrity sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA== + dependencies: + glob "^7.1.1" + json-parse-even-better-errors "^2.3.0" + normalize-package-data "^2.0.0" + npm-normalize-package-bin "^1.0.0" + +read-pkg-up@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + integrity sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4= + dependencies: + find-up "^2.0.0" + read-pkg "^2.0.0" + +read-pkg-up@^7.0.0, read-pkg-up@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" + integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== + dependencies: + find-up "^4.1.0" + read-pkg "^5.2.0" + type-fest "^0.8.1" + +read-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + integrity sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg= + dependencies: + load-json-file "^2.0.0" + normalize-package-data "^2.3.2" + path-type "^2.0.0" + +read-pkg@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" + integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== + dependencies: + "@types/normalize-package-data" "^2.4.0" + normalize-package-data "^2.5.0" + parse-json "^5.0.0" + type-fest "^0.6.0" + +readable-stream@^2.0.0: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity 
sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.0, readable-stream@^3.4.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdir-scoped-modules@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz#8d45407b4f870a0dcaebc0e28670d18e74514309" + integrity sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw== + dependencies: + debuglog "^1.0.1" + dezalgo "^1.0.0" + graceful-fs "^4.1.2" + once "^1.3.0" + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +redent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +redeyed@~2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-2.1.1.tgz#8984b5815d99cb220469c99eeeffe38913e6cc0b" + integrity sha1-iYS1gV2ZyyIEacme7v/jiRPmzAs= + dependencies: + esprima "~4.0.0" + +regexp.prototype.flags@^1.3.1: + version "1.3.1" + resolved 
"https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz#7ef352ae8d159e758c0eadca6f8fcb4eef07be26" + integrity sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +regexpp@^3.0.0, regexpp@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +registry-auth-token@^4.0.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.1.tgz#6d7b4006441918972ccd5fedcd41dc322c79b250" + integrity sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw== + dependencies: + rc "^1.2.8" + +registry-url@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009" + integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== + dependencies: + rc "^1.2.8" + +release-zalgo@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/release-zalgo/-/release-zalgo-1.0.0.tgz#09700b7e5074329739330e535c5a90fb67851730" + integrity sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA= + dependencies: + es6-error "^4.0.1" + +request@^2.88.2: + version "2.88.2" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" + integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" 
+ mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.5.0" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-main-filename@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + +resolve-from@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + integrity sha1-six699nWiBvItuZTM17rywoYh0g= + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve@^1.10.0, resolve@^1.10.1, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.20.0: + version "1.20.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" + integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== + dependencies: + is-core-module "^2.2.0" + path-parse "^1.0.6" + +responselike@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" 
+ integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= + dependencies: + lowercase-keys "^1.0.0" + +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + +restore-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== + dependencies: + onetime "^5.1.0" + signal-exit "^3.0.2" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@2.6.3: + version "2.6.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" + integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== + dependencies: + glob "^7.1.3" + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: + version "5.2.1" + resolved 
"https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +scheduler@^0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.18.0.tgz#5901ad6659bc1d8f3fdaf36eb7a67b0d6746b1c4" + integrity sha512-agTSHR1Nbfi6ulI0kYNK0203joW2Y5W4po4l+v03tOoiJKpTBbxpNhWDvqc/4IcOw+KLmSiQLTasZ4cab2/UWQ== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +secure-json-parse@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/secure-json-parse/-/secure-json-parse-2.4.0.tgz#5aaeaaef85c7a417f76271a4f5b0cc3315ddca85" + integrity sha512-Q5Z/97nbON5t/L/sH6mY2EacfjVGwrCcSi5D3btRO2GZ8pf1K1UN7Z9H5J57hjVU2Qzxr1xO+FmBhOvEkzCMmg== + +semver-diff@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b" + integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg== + dependencies: + semver "^6.3.0" + +"semver@2 || 3 || 4 || 5", semver@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity 
sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +semver@^6.0.0, semver@^6.1.0, semver@^6.2.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.2.1, semver@^7.3.5: + version "7.3.5" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" + integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== + dependencies: + lru-cache "^6.0.0" + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.0, signal-exit@^3.0.2: + version "3.0.3" + resolved 
"https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" + integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== + +simple-git@^2.39.0: + version "2.42.0" + resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-2.42.0.tgz#1c1ab2f06d28792d9d2d47f8ab75bd1ca5b6a4c3" + integrity sha512-illpUX0bcrdB3AyvBGLz0ToRVP7lXNJOGVybGVuVk7PpivPNK5YKJx2aagKdKbveaMtt0DCLK4/jfjDb6b2M2g== + dependencies: + "@kwsites/file-exists" "^1.1.1" + "@kwsites/promise-deferred" "^1.1.1" + debug "^4.3.1" + +simple-statistics@^7.7.0: + version "7.7.0" + resolved "https://registry.yarnpkg.com/simple-statistics/-/simple-statistics-7.7.0.tgz#cfef964473c940f2adfec85ffde8591a5a933a46" + integrity sha512-TAsZRUJ7FD/yCnm5UBgyWU7bP1gOPsw9n/dVrE8hQ+BF1zJPgDJ5X/MOnxG+HE/7nejSpJLJLdmTh7bkfsFkRw== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slice-ansi@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" + integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ== + dependencies: + ansi-styles "^3.2.0" + astral-regex "^1.0.0" + is-fullwidth-code-point "^2.0.0" + +slice-ansi@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787" + integrity sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +slide@~1.1.3: + version "1.1.6" + resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" + 
integrity sha1-VusCfWW00tzmyy4tMsTUr8nh1wc= + +source-map-support@^0.5.16: + version "0.5.19" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" + integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.5.0: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.6.0, source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +spawn-wrap@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-2.0.0.tgz#103685b8b8f9b79771318827aa78650a610d457e" + integrity sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg== + dependencies: + foreground-child "^2.0.0" + is-windows "^1.0.2" + make-dir "^3.0.0" + rimraf "^3.0.0" + signal-exit "^3.0.2" + which "^2.0.1" + +spdx-compare@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/spdx-compare/-/spdx-compare-1.0.0.tgz#2c55f117362078d7409e6d7b08ce70a857cd3ed7" + integrity sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A== + dependencies: + array-find-index "^1.0.2" + spdx-expression-parse "^3.0.0" + spdx-ranges "^2.0.0" + +spdx-correct@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" + integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== + dependencies: + spdx-expression-parse "^3.0.0" + 
spdx-license-ids "^3.0.0" + +spdx-exceptions@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" + integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== + +spdx-expression-parse@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.10" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.10.tgz#0d9becccde7003d6c658d487dd48a32f0bf3014b" + integrity sha512-oie3/+gKf7QtpitB0LYLETe+k8SifzsX4KixvpOsbI6S0kRiRQ5MKOio8eMSAKQ17N06+wdEOXRiId+zOxo0hA== + +spdx-ranges@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/spdx-ranges/-/spdx-ranges-2.1.1.tgz#87573927ba51e92b3f4550ab60bfc83dd07bac20" + integrity sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA== + +spdx-satisfies@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/spdx-satisfies/-/spdx-satisfies-4.0.1.tgz#9a09a68d80f5f1a31cfaebb384b0c6009e4969fe" + integrity sha512-WVzZ/cXAzoNmjCWiEluEA3BjHp5tiUmmhn9MK+X0tBbR9sOqtC6UQwmgCNrAIZvNlMuBUYAaHYfb2oqlF9SwKA== + dependencies: + spdx-compare "^1.0.0" + spdx-expression-parse "^3.0.0" + spdx-ranges "^2.0.0" + +split2@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" + integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg== + dependencies: + readable-stream "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +sshpk@^1.7.0: + version "1.16.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" + integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +stack-utils@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277" + integrity sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw== + dependencies: + escape-string-regexp "^2.0.0" + +standard-engine@^14.0.1: + version "14.0.1" + resolved "https://registry.yarnpkg.com/standard-engine/-/standard-engine-14.0.1.tgz#fe568e138c3d9768fc59ff81001f7049908a8156" + integrity sha512-7FEzDwmHDOGva7r9ifOzD3BGdTbA7ujJ50afLVdW/tK14zQEptJjbFuUfn50irqdHDcTbNh0DTIoMPynMCXb0Q== + dependencies: + get-stdin "^8.0.0" + minimist "^1.2.5" + pkg-conf "^3.1.0" + xdg-basedir "^4.0.0" + +standard@^16.0.3: + version "16.0.3" + resolved "https://registry.yarnpkg.com/standard/-/standard-16.0.3.tgz#a854c0dd2dea6b9f0b8d20c65260210bd0cee619" + integrity sha512-70F7NH0hSkNXosXRltjSv6KpTAOkUkSfyu3ynyM5dtRUiLtR+yX9EGZ7RKwuGUqCJiX/cnkceVM6HTZ4JpaqDg== + dependencies: + eslint "~7.13.0" + eslint-config-standard "16.0.2" + eslint-config-standard-jsx "10.0.0" + eslint-plugin-import "~2.22.1" + eslint-plugin-node "~11.1.0" + eslint-plugin-promise "~4.2.1" + eslint-plugin-react "~7.21.5" + standard-engine "^14.0.1" + +stoppable@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b" + integrity 
sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw== + +string-length@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-3.1.0.tgz#107ef8c23456e187a8abd4a61162ff4ac6e25837" + integrity sha512-Ttp5YvkGm5v9Ijagtaz1BnN+k9ObpvS0eIBblPMp2YWL8FBmi9qblQ9fexc2k/CXFgrTIteU3jAw3payCnwSTA== + dependencies: + astral-regex "^1.0.0" + strip-ansi "^5.2.0" + +string-width@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +string-width@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + +string-width@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" + integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== + dependencies: + emoji-regex "^7.0.1" + is-fullwidth-code-point "^2.0.0" + strip-ansi "^5.1.0" + +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0: + version "4.2.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" + integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.0" + +string.prototype.matchall@^4.0.2: + version "4.0.5" + resolved 
"https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.5.tgz#59370644e1db7e4c0c045277690cf7b01203c4da" + integrity sha512-Z5ZaXO0svs0M2xd/6By3qpeKpLKd9mO4v4q3oMEQrk8Ck4xOD5d5XeBOOjGrmVZZ/AHB1S0CgG4N5r1G9N3E2Q== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.18.2" + get-intrinsic "^1.1.1" + has-symbols "^1.0.2" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.3.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" + integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +string.prototype.trimstart@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" + integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= + dependencies: + ansi-regex "^3.0.0" + +strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + dependencies: + ansi-regex "^4.1.0" + +strip-ansi@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" + integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== + dependencies: + ansi-regex "^5.0.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-indent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved 
"https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" + integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +table@^5.2.3: + version "5.4.6" + resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e" + integrity sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug== + dependencies: + ajv "^6.10.2" + lodash "^4.17.14" + slice-ansi "^2.1.0" + string-width "^3.0.0" + +tap-mocha-reporter@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/tap-mocha-reporter/-/tap-mocha-reporter-5.0.1.tgz#74f00be2ddd2a380adad45e085795137bc39497a" + 
integrity sha512-1knFWOwd4khx/7uSEnUeaP9IPW3w+sqTgJMhrwah6t46nZ8P25atOKAjSvVDsT67lOPu0nfdOqUwoyKn+3E5pA== + dependencies: + color-support "^1.1.0" + debug "^4.1.1" + diff "^4.0.1" + escape-string-regexp "^2.0.0" + glob "^7.0.5" + tap-parser "^10.0.0" + tap-yaml "^1.0.0" + unicode-length "^2.0.2" + +tap-parser@^10.0.0, tap-parser@^10.0.1: + version "10.1.0" + resolved "https://registry.yarnpkg.com/tap-parser/-/tap-parser-10.1.0.tgz#7b1aac40dbcaa4716c0b58952686eae65d2b74ad" + integrity sha512-FujQeciDaOiOvaIVGS1Rpb0v4R6XkOjvWCWowlz5oKuhPkEJ8U6pxgqt38xuzYhPt8dWEnfHn2jqpZdJEkW7pA== + dependencies: + events-to-array "^1.0.1" + minipass "^3.0.0" + tap-yaml "^1.0.0" + +tap-yaml@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/tap-yaml/-/tap-yaml-1.0.0.tgz#4e31443a5489e05ca8bbb3e36cef71b5dec69635" + integrity sha512-Rxbx4EnrWkYk0/ztcm5u3/VznbyFJpyXO12dDBHKWiDVxy7O2Qw6MRrwO5H6Ww0U5YhRY/4C/VzWmFPhBQc4qQ== + dependencies: + yaml "^1.5.0" + +tap@^15.0.9: + version "15.0.9" + resolved "https://registry.yarnpkg.com/tap/-/tap-15.0.9.tgz#21347a789e88798a94aa151d25ea1c92d04c27ef" + integrity sha512-bqY5SxEqYKRd37PIUfKBf9HMs/hklyl/fGXkuStr9rYTIGa0/icpSLsm6IVOmx2qT0/TliPNJ6OvS5kddJYHdg== + dependencies: + "@types/react" "^16.9.23" + chokidar "^3.3.0" + coveralls "^3.0.11" + findit "^2.0.0" + foreground-child "^2.0.0" + fs-exists-cached "^1.0.0" + glob "^7.1.6" + import-jsx "^4.0.0" + ink "^2.7.1" + isexe "^2.0.0" + istanbul-lib-processinfo "^2.0.2" + jackspeak "^1.4.0" + libtap "^1.1.1" + minipass "^3.1.1" + mkdirp "^1.0.4" + nyc "^15.1.0" + opener "^1.5.1" + react "^16.12.0" + rimraf "^3.0.0" + signal-exit "^3.0.0" + source-map-support "^0.5.16" + tap-mocha-reporter "^5.0.0" + tap-parser "^10.0.1" + tap-yaml "^1.0.0" + tcompare "^5.0.6" + treport "^2.0.2" + which "^2.0.2" + +tcompare@^5.0.1, tcompare@^5.0.6: + version "5.0.6" + resolved "https://registry.yarnpkg.com/tcompare/-/tcompare-5.0.6.tgz#b809135cca3d165fb17d5eabb9ed292629e38e31" + integrity 
sha512-OvO7omN/wkdsKzmOqr3sQFfLbghs/2X5mwSkcfgRiXZshfPnTsAs3IRf1RixR/Pff26qG/r9ogcZMpV0YdeGXg== + dependencies: + diff "^4.0.2" + +term-size@^2.1.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.1.tgz#2a6a54840432c2fb6320fea0f415531e90189f54" + integrity sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg== + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + +to-readable-stream@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" + integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity 
sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + +treeify@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/treeify/-/treeify-1.1.0.tgz#4e31c6a463accd0943879f30667c4fdaff411bb8" + integrity sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A== + +treport@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/treport/-/treport-2.0.2.tgz#a007278c01335c1209e3f5c20e98ef14fd736cc2" + integrity sha512-AnHKgHMy3II7Arfvf1tSHAwv9rzcvgbWrOixFJgdExVKd0mMsOp9wD2LGP9RbXy9j8AZoerBVu3OR2Uz9MpUJw== + dependencies: + cardinal "^2.1.1" + chalk "^3.0.0" + import-jsx "^4.0.0" + ink "^2.6.0" + ms "^2.1.2" + string-length "^3.1.0" + tap-parser "^10.0.1" + unicode-length "^2.0.2" + +trim-newlines@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144" + integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw== + +trivial-deferred@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/trivial-deferred/-/trivial-deferred-1.0.1.tgz#376d4d29d951d6368a6f7a0ae85c2f4d5e0658f3" + integrity sha1-N21NKdlR1jaKb3oK6FwvTV4GWPM= + +tsconfig-paths@^3.9.0: + version "3.10.1" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.10.1.tgz#79ae67a68c15289fdf5c51cb74f397522d795ed7" + integrity sha512-rETidPDgCpltxF7MjBZlAFPUHv5aHH2MymyPvh+vEyWAED4Eb/WeMbsnD/JDr4OKPOA1TssDHgIcpTN5Kh0p6Q== + dependencies: + json5 "^2.2.0" + minimist "^1.2.0" + strip-bom "^3.0.0" + +tsd@^0.15.1: + version "0.15.1" + resolved "https://registry.yarnpkg.com/tsd/-/tsd-0.15.1.tgz#d0c733c623d59de52f180ae7af66a1fde9e6c533" + integrity sha512-8ADO2rPntfNiJV4KiqJiiiitfkXLxCbKEFN672JgwNiaEIuiyurTc1+w3InZ+0DqBz73B6Z3UflZcNGw5xMaDA== + dependencies: + eslint-formatter-pretty "^4.0.0" + globby 
"^11.0.1" + meow "^7.0.1" + path-exists "^4.0.0" + read-pkg-up "^7.0.0" + update-notifier "^4.1.0" + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + dependencies: + safe-buffer "^5.0.1" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.13.1.tgz#0172cb5bce80b0bd542ea348db50c7e21834d934" + integrity sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-fest@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" + integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== + +type-fest@^0.6.0: + version "0.6.0" + resolved 
"https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" + integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== + +type-fest@^0.8.0, type-fest@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" + integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +unbox-primitive@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471" + integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw== + dependencies: + function-bind "^1.1.1" + has-bigints "^1.0.1" + has-symbols "^1.0.2" + which-boxed-primitive "^1.0.2" + +unicode-length@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/unicode-length/-/unicode-length-2.0.2.tgz#e5eb4c0d523fdf7bebb59ca261c9ca1cf732da96" + integrity sha512-Ph/j1VbS3/r77nhoY2WU0GWGjVYOHL3xpKp0y/Eq2e5r0mT/6b649vm7KFO6RdAdrZkYLdxphYVgvODxPB+Ebg== + dependencies: + punycode "^2.0.0" + strip-ansi "^3.0.1" + +unique-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +update-notifier@^4.1.0: + version "4.1.3" + resolved 
"https://registry.yarnpkg.com/update-notifier/-/update-notifier-4.1.3.tgz#be86ee13e8ce48fb50043ff72057b5bd598e1ea3" + integrity sha512-Yld6Z0RyCYGB6ckIjffGOSOmHXj1gMeE7aROz4MG+XMkmixBX4jUngrGXNYz7wPKBmtoD4MnBa2Anu7RSKht/A== + dependencies: + boxen "^4.2.0" + chalk "^3.0.0" + configstore "^5.0.1" + has-yarn "^2.1.0" + import-lazy "^2.1.0" + is-ci "^2.0.0" + is-installed-globally "^0.3.1" + is-npm "^4.0.0" + is-yarn-global "^0.3.0" + latest-version "^5.0.0" + pupa "^2.0.1" + semver-diff "^3.1.1" + xdg-basedir "^4.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse-lax@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" + integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= + dependencies: + prepend-http "^2.0.0" + +util-deprecate@^1.0.1, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +util-extend@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/util-extend/-/util-extend-1.0.3.tgz#a7c216d267545169637b3b6edc6ca9119e2ff93f" + integrity sha1-p8IW0mdUUWljeztu3GypEZ4v+T8= + +uuid@^3.3.2, uuid@^3.3.3: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity 
sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +validate-npm-package-license@^3.0.1: + version "3.0.4" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + dependencies: + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= + dependencies: + defaults "^1.0.3" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= + +which@^2.0.1, which@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +widest-line@^3.1.0: + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca" + integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== + dependencies: + string-width "^4.0.0" + +word-wrap@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +workq@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/workq/-/workq-3.0.0.tgz#2a54cd67bc65bc7595548c8fafcd493ab4e0c559" + integrity sha512-zCLwCuqc1WMiCtcbtKBtkgOkFHCvGsmkJ6IbgazOcctPXGjM8/AYKBjYaJvKzsMigbUW3CFApZudMor4E6D6zA== + dependencies: + debug "^4.1.1" + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + +wrap-ansi@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" + integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== + dependencies: + ansi-styles "^3.2.0" + string-width "^3.0.0" + strip-ansi "^5.0.0" + +wrap-ansi@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" + integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +write-file-atomic@^3.0.0: + version "3.0.3" + 
resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +write@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3" + integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig== + dependencies: + mkdirp "^0.5.1" + +xdg-basedir@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13" + integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== + +xmlbuilder2@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/xmlbuilder2/-/xmlbuilder2-2.4.1.tgz#899c783a833188c5a5aa6f3c5428a3963f3e479d" + integrity sha512-vliUplZsk5vJnhxXN/mRcij/AE24NObTUm/Zo4vkLusgayO6s3Et5zLEA14XZnY1c3hX5o1ToR0m0BJOPy0UvQ== + dependencies: + "@oozcitak/dom" "1.15.8" + "@oozcitak/infra" "1.0.8" + "@oozcitak/util" "8.3.8" + "@types/node" "*" + js-yaml "3.14.0" + +y18n@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" + integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.5.0: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity 
sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yapool@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/yapool/-/yapool-1.0.0.tgz#f693f29a315b50d9a9da2646a7a6645c96985b6a" + integrity sha1-9pPymjFbUNmp2iZGp6ZkXJaYW2o= + +yargs-parser@^18.1.2, yargs-parser@^18.1.3: + version "18.1.3" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs@^15.0.2: + version "15.4.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== + dependencies: + cliui "^6.0.0" + decamelize "^1.2.0" + find-up "^4.1.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^4.2.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^18.1.2" + +yoga-layout-prebuilt@^1.9.3: + version "1.10.0" + resolved "https://registry.yarnpkg.com/yoga-layout-prebuilt/-/yoga-layout-prebuilt-1.10.0.tgz#2936fbaf4b3628ee0b3e3b1df44936d6c146faa6" + integrity sha512-YnOmtSbv4MTf7RGJMK0FvZ+KD8OEe/J5BNnR0GHhD8J/XcG/Qvxgszm0Un6FTHWW4uHlTgP0IztiXQnGyIR45g== + dependencies: + "@types/yoga-layout" "1.9.2" From 1eda789e9f5f345f1bc8e0466b294ff4e00c050d Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Fri, 13 Aug 2021 18:31:55 +0000 Subject: [PATCH 02/10] Change opensearch-js reference back to elasticsearch-js since we're referencing elastic Signed-off-by: Bishoy Boktor --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0b0409230..30fcec3fa 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ OpenSearch Node.js client ## 
Welcome! -**opensearch-js** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of opensearch-js licensed under the [Apache v2.0 License](LICENSE.txt). For more information, see [opensearch.org](https://opensearch.org/). +**opensearch-js** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of elasticsearch-js licensed under the [Apache v2.0 License](LICENSE.txt). For more information, see [opensearch.org](https://opensearch.org/). ## Project Resources From 762c01bcfa8148bb75a248875695739145b1ef4a Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Thu, 19 Aug 2021 19:19:39 +0000 Subject: [PATCH 03/10] Fix name mappings and spacing issues Signed-off-by: Bishoy Boktor --- .ci/run-opensearch.sh | 12 +++++----- api/new.d.ts | 2 +- docs/examples/proxy/README.md | 6 ++--- index.d.ts | 4 ++-- index.js | 2 +- lib/Connection.js | 2 +- lib/Helpers.js | 4 ++-- lib/Transport.js | 8 +++---- scripts/download-artifacts.js | 22 ++++++++--------- scripts/utils/clone-opensearch.js | 30 ++++++++++++------------ scripts/utils/generateDocs.js | 6 ++--- scripts/wait-cluster.sh | 4 ++-- test/acceptance/product-check.test.js | 4 ++-- test/benchmarks/suite.js | 8 +++---- test/fixtures/stackoverflow.ndjson | 14 +++++------ test/integration/README.md | 16 ++++--------- test/integration/helpers/bulk.test.js | 2 +- test/integration/helpers/msearch.test.js | 2 +- test/integration/helpers/scroll.test.js | 2 +- test/integration/helpers/search.test.js | 2 +- test/integration/index.js | 4 ++-- test/types/new-types.test-d.ts | 18 +++++++------- test/unit/connection.test.js | 2 +- 23 files changed, 85 insertions(+), 91 deletions(-) diff --git a/.ci/run-opensearch.sh b/.ci/run-opensearch.sh index 7b33b7aab..1138ede19 100755 --- a/.ci/run-opensearch.sh +++ b/.ci/run-opensearch.sh @@ -24,14 +24,14 @@ source $script_path/functions/imports.sh set -euo pipefail echo -e "\033[34;1mINFO:\033[0m Take 
down node if called twice with the same arguments (DETACH=true) or on seperate terminals \033[0m" -cleanup_node $os_node_name +cleanup_node $opensearch_node_name -master_node_name=${os_node_name} +master_node_name=${opensearch_node_name} cluster_name=${moniker}${suffix} declare -a volumes environment=($(cat <<-END - --env node.name=$os_node_name + --env node.name=$opensearch_node_name --env cluster.name=$cluster_name --env cluster.initial_master_nodes=$master_node_name --env discovery.seed_hosts=$master_node_name @@ -68,9 +68,9 @@ done NUMBER_OF_NODES=${NUMBER_OF_NODES-1} http_port=9200 for (( i=0; i<$NUMBER_OF_NODES; i++, http_port++ )); do - node_name=${os_node_name}$i + node_name=${opensearch_node_name}$i node_url=${external_opensearch_url/9200/${http_port}}$i - if [[ "$i" == "0" ]]; then node_name=$os_node_name; fi + if [[ "$i" == "0" ]]; then node_name=$opensearch_node_name; fi environment+=($(cat <<-END --env node.name=$node_name END @@ -105,7 +105,7 @@ END docker.opensearch.co/opensearch/"$opensearch_container"; set +x - if wait_for_container "$os_node_name" "$network_name"; then + if wait_for_container "$opensearch_node_name" "$network_name"; then echo -e "\033[32;1mSUCCESS:\033[0m Running on: $node_url\033[0m" fi diff --git a/api/new.d.ts b/api/new.d.ts index a4faab64c..bb4e26cb5 100644 --- a/api/new.d.ts +++ b/api/new.d.ts @@ -695,7 +695,7 @@ declare class Client { updateByQueryRethrottle(params: T.UpdateByQueryRethrottleRequest, options: TransportRequestOptions, callback: callbackFn): TransportRequestCallback } -export * as ostypes from './types' +export * as opensearchtypes from './types' export { Client, Transport, diff --git a/docs/examples/proxy/README.md b/docs/examples/proxy/README.md index 05cf534bd..cfba3f195 100644 --- a/docs/examples/proxy/README.md +++ b/docs/examples/proxy/README.md @@ -1,7 +1,7 @@ -# OpenSearchproxy example +# OpenSearch proxy example This folder contains an example of how to build a lightweight proxy -between your frontend 
code and OpenSearchif you don't +between your frontend code and OpenSearch if you don't have a more sophisticated backend in place yet. > **IMPORTANT:** This is not a production ready code and it is only for demonstration purposes, @@ -42,7 +42,7 @@ or read the [quickstart](https://vercel.com/docs) documentation. ## Authentication -If you are using OpenSearchonly for search purposes, such as a search box, you can create +If you are using OpenSearch only for search purposes, such as a search box, you can create an Api Key with `read` permissions and store it in your frontend app. Then you can send it via `Authorization` header to the proxy and run your searches. diff --git a/index.d.ts b/index.d.ts index 315f71105..57a2368ae 100644 --- a/index.d.ts +++ b/index.d.ts @@ -58,7 +58,7 @@ import { import Serializer from './lib/Serializer'; import Helpers from './lib/Helpers'; import * as errors from './lib/errors'; -import * as ostypes from './api/types' +import * as opensearchtypes from './api/types' import * as RequestParams from './api/requestParams' declare type callbackFn = (err: ApiError, result: ApiResponse) => void; @@ -1202,7 +1202,7 @@ export { ApiResponse, RequestEvent, ResurrectEvent, - ostypes, + opensearchtypes, RequestParams, ClientOptions, NodeOptions, diff --git a/index.js b/index.js index 5c52c95bd..50a9ef246 100644 --- a/index.js +++ b/index.js @@ -72,7 +72,7 @@ class Client extends OSAPI { opts.node = `https://${cloudUrls[1]}.${cloudUrls[0]}` // Cloud has better performances with compression enabled - // see https://github.com/opensearch-project/opensearch-py/pull/704. + // see https://github.com/elastic/elasticsearch-py/pull/704. // So unless the user specifies otherwise, we enable compression. 
if (opts.compression == null) opts.compression = 'gzip' if (opts.suggestCompression == null) opts.suggestCompression = true diff --git a/lib/Connection.js b/lib/Connection.js index ef8e1ce27..c0ab2e6fd 100644 --- a/lib/Connection.js +++ b/lib/Connection.js @@ -217,7 +217,7 @@ class Connection { path: '', href: url.href, origin: url.origin, - // https://github.com/opensearch-project/opensearch-js/issues/843 + // https://github.com/elastic/elasticsearch-js/issues/843 port: url.port !== '' ? url.port : undefined, headers: this.headers, agent: this.agent diff --git a/lib/Helpers.js b/lib/Helpers.js index febe8e353..adbccdc0d 100644 --- a/lib/Helpers.js +++ b/lib/Helpers.js @@ -52,7 +52,7 @@ class Helpers { /** * Runs a search operation. The only difference between client.search and this utility, - * is that we are only returning the hits to the user and not the full opensearch response. + * is that we are only returning the hits to the user and not the full OpenSearch response. * This helper automatically adds `filter_path=hits.hits._source` to the querystring, * as it will only need the documents source. * @param {object} params - The OpenSearch's search parameters. @@ -723,7 +723,7 @@ class Helpers { if (status >= 400) { // 429 is the only staus code where we might want to retry // a document, because it was not an error in the document itself, - // but the opensearch node were handling too many operations. + // but the OpenSearch node were handling too many operations. if (status === 429) { retry.push(bulkBody[indexSlice]) /* istanbul ignore next */ diff --git a/lib/Transport.js b/lib/Transport.js index 3b709f60a..75a781f4d 100644 --- a/lib/Transport.js +++ b/lib/Transport.js @@ -561,7 +561,7 @@ class Transport { debug('compatible check failed', err) if (err.statusCode === 401 || err.statusCode === 403) { this[kCompatibleCheck] = 2 - process.emitWarning('The client is unable to verify that the server is OpenSearchdue to security privileges on the server side. 
Some functionality may not be compatible if the server is running an unsupported product.') + process.emitWarning('The client is unable to verify that the server is OpenSearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') compatibleCheckEmitter.emit('compatible-check', true) } else { this[kCompatibleCheck] = 0 @@ -570,7 +570,7 @@ class Transport { } else { debug('Checking opensearch version', result.body, result.headers) if (result.body.version == null || typeof result.body.version.number !== 'string') { - debug('Can\'t access OpenSearchversion') + debug('Can\'t access OpenSearch version') return compatibleCheckEmitter.emit('compatible-check', false) } @@ -587,11 +587,11 @@ class Transport { // support odfe > v7 validation if (major !== 7) { - debug('Invalid OpenSearchdistribution') + debug('Invalid OpenSearch distribution') return compatibleCheckEmitter.emit('compatible-check', false) } - debug('Valid OpenSearchdistribution') + debug('Valid OpenSearch distribution') this[kCompatibleCheck] = 2 compatibleCheckEmitter.emit('compatible-check', true) } diff --git a/scripts/download-artifacts.js b/scripts/download-artifacts.js index 87a1625ca..ffa5569ad 100644 --- a/scripts/download-artifacts.js +++ b/scripts/download-artifacts.js @@ -45,11 +45,11 @@ const pipeline = promisify(stream.pipeline) const unzip = promisify(crossZip.unzip) const rm = promisify(rimraf) -const osFolder = join(__dirname, '..', 'opensearch') -const zipFolder = join(osFolder, 'artifacts.zip') -const specFolder = join(osFolder, 'rest-api-spec', 'api') -const ossTestFolder = join(osFolder, 'rest-api-spec', 'test', 'oss') -const artifactInfo = join(osFolder, 'info.json') +const opensearchFolder = join(__dirname, '..', 'opensearch') +const zipFolder = join(opensearchFolder, 'artifacts.zip') +const specFolder = join(opensearchFolder, 'rest-api-spec', 'api') +const ossTestFolder = join(opensearchFolder, 
'rest-api-spec', 'test', 'oss') +const artifactInfo = join(opensearchFolder, 'info.json') async function downloadArtifacts (opts) { if (typeof opts.version !== 'string') { @@ -81,8 +81,8 @@ async function downloadArtifacts (opts) { } log.text = 'Cleanup checkouts/opensearch' - await rm(osFolder) - await mkdir(osFolder, { recursive: true }) + await rm(opensearchFolder) + await mkdir(opensearchFolder, { recursive: true }) log.text = 'Downloading artifacts' const response = await fetch(resolved.url) @@ -93,7 +93,7 @@ async function downloadArtifacts (opts) { await pipeline(response.body, createWriteStream(zipFolder)) log.text = 'Unzipping' - await unzip(zipFolder, osFolder) + await unzip(zipFolder, opensearchFolder) log.text = 'Cleanup' await rm(zipFolder) @@ -119,7 +119,7 @@ async function resolve (version, hash) { } const data = await response.json() - const esBuilds = data.version.builds + const opensearchBuilds = data.version.builds .filter(build => build.projects.opensearch != null) .map(build => { return { @@ -138,7 +138,7 @@ async function resolve (version, hash) { }) if (hash != null) { - const build = esBuilds.find(build => build.projects.commit_hash === hash) + const build = opensearchBuilds.find(build => build.projects.commit_hash === hash) if (!build) { throw new Error(`Can't find any build with hash '${hash}'`) } @@ -151,7 +151,7 @@ async function resolve (version, hash) { } } - const lastBuild = esBuilds[0] + const lastBuild = opensearchBuilds[0] const zipKey = Object.keys(lastBuild.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip')) return { url: lastBuild.projects.packages[zipKey].url, diff --git a/scripts/utils/clone-opensearch.js b/scripts/utils/clone-opensearch.js index f2d703376..6e52fa33b 100644 --- a/scripts/utils/clone-opensearch.js +++ b/scripts/utils/clone-opensearch.js @@ -34,36 +34,36 @@ const { accessSync, mkdirSync } = require('fs') const { join } = require('path') const Git = require('simple-git') 
-const osRepo = 'https://github.com/opensearch-project/opensearch.git' -const osFolder = join(__dirname, '..', '..', 'opensearch') -const apiFolder = join(osFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'api') +const opensearchRepo = 'https://github.com/opensearch-project/OpenSearch.git' +const opensearchFolder = join(__dirname, '..', '..', 'opensearch') +const apiFolder = join(opensearchFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'api') -function cloneAndCheckout(opts, callback) { +function cloneAndCheckout (opts, callback) { const { log, tag, branch } = opts withTag(tag, callback) /** * Sets the opensearch repository to the given tag. - * If the repository is not present in `osFolder` it will + * If the repository is not present in `opensearchFolder` it will * clone the repository and the checkout the tag. * If the repository is already present but it cannot checkout to * the given tag, it will perform a pull and then try again. * @param {string} tag * @param {function} callback */ - function withTag(tag, callback) { + function withTag (tag, callback) { let fresh = false let retry = 0 - if (!pathExist(osFolder)) { - if (!createFolder(osFolder)) { + if (!pathExist(opensearchFolder)) { + if (!createFolder(opensearchFolder)) { log.fail('Failed folder creation') return } fresh = true } - const git = Git(osFolder) + const git = Git(opensearchFolder) if (fresh) { clone(checkout) @@ -73,7 +73,7 @@ function cloneAndCheckout(opts, callback) { checkout() } - function checkout(alsoPull = false) { + function checkout (alsoPull = false) { if (branch) { log.text = `Checking out branch '${branch}'` } else { @@ -94,7 +94,7 @@ function cloneAndCheckout(opts, callback) { }) } - function pull(cb) { + function pull (cb) { log.text = 'Pulling opensearch repository...' 
git.pull(err => { if (err) { @@ -105,9 +105,9 @@ function cloneAndCheckout(opts, callback) { }) } - function clone(cb) { + function clone (cb) { log.text = 'Cloning opensearch repository...' - git.clone(osRepo, osFolder, err => { + git.clone(opensearchRepo, opensearchFolder, err => { if (err) { callback(err, { apiFolder }) return @@ -122,7 +122,7 @@ function cloneAndCheckout(opts, callback) { * @param {string} path * @returns {boolean} true if exists, false if not */ - function pathExist(path) { + function pathExist (path) { try { accessSync(path) return true @@ -136,7 +136,7 @@ function cloneAndCheckout(opts, callback) { * @param {string} name * @returns {boolean} true on success, false on failure */ - function createFolder(name) { + function createFolder (name) { try { mkdirSync(name) return true diff --git a/scripts/utils/generateDocs.js b/scripts/utils/generateDocs.js index 85d21d768..0f68af3e5 100644 --- a/scripts/utils/generateDocs.js +++ b/scripts/utils/generateDocs.js @@ -76,9 +76,9 @@ function generateDocs (common, spec) { == API Reference - This document contains the entire list of the OpenSearchAPI supported by the client, both OSS and commercial. The client is entirely licensed under Apache 2.0. + This document contains the entire list of the OpenSearch API supported by the client, both OSS and commercial. The client is entirely licensed under Apache 2.0. - OpenSearchexposes an HTTP layer to communicate with, and the client is a library that will help you do this. Because of this reason, you will see HTTP related parameters, such as ${'`'}body${'`'} or ${'`'}headers${'`'}. + OpenSearch exposes an HTTP layer to communicate with, and the client is a library that will help you do this. Because of this reason, you will see HTTP related parameters, such as ${'`'}body${'`'} or ${'`'}headers${'`'}. 
Every API can accept two objects, the first contains all the parameters that will be sent to OpenSearch, while the second includes the request specific parameters, such as timeouts, headers, and so on. In the first object, every parameter but the body will be sent via querystring or url parameter, depending on the API, and every unrecognized parameter will be sent as querystring. @@ -276,7 +276,7 @@ function fixLink (name, str) { if (override) return override if (!str) return '' /* Replace references to the guide with the attribute {ref} because - * the json files in the OpenSearchrepo are a bit of a mess. */ + * the json files in the OpenSearch repo are a bit of a mess. */ str = str.replace(/^.+guide\/en\/opensearch\/reference\/[^/]+\/([^./]*\.html(?:#.+)?)$/, '{ref}/$1') str = str.replace(/frozen\.html/, 'freeze-index-api.html') str = str.replace(/ml-file-structure\.html/, 'ml-find-file-structure.html') diff --git a/scripts/wait-cluster.sh b/scripts/wait-cluster.sh index aa6274720..09eb85ee0 100755 --- a/scripts/wait-cluster.sh +++ b/scripts/wait-cluster.sh @@ -1,10 +1,10 @@ #!/bin/bash -TEST_ES_SERVER=${TEST_ES_SERVER:-"http://localhost:9200"} +TEST_OPENSEARCH_SERVER=${TEST_OPENSEARCH_SERVER:-"http://localhost:9200"} attempt_counter=0 max_attempts=5 -url="${TEST_ES_SERVER}/_cluster/health?wait_for_status=green&timeout=50s" +url="${TEST_OPENSEARCH_SERVER}/_cluster/health?wait_for_status=green&timeout=50s" echo "Waiting for OpenSearch..." 
while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' --max-time 55 "$url")" != "200" ]]; do diff --git a/test/acceptance/product-check.test.js b/test/acceptance/product-check.test.js index 3b645f703..0b30b7fe7 100644 --- a/test/acceptance/product-check.test.js +++ b/test/acceptance/product-check.test.js @@ -228,7 +228,7 @@ test('Auth error - 401', t => { process.on('warning', onWarning) function onWarning (warning) { - t.equal(warning.message, 'The client is unable to verify that the server is OpenSearchdue to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') + t.equal(warning.message, 'The client is unable to verify that the server is OpenSearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') } const requests = [{ @@ -279,7 +279,7 @@ test('Auth error - 403', t => { process.on('warning', onWarning) function onWarning (warning) { - t.equal(warning.message, 'The client is unable to verify that the server is OpenSearchdue to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') + t.equal(warning.message, 'The client is unable to verify that the server is OpenSearch due to security privileges on the server side. 
Some functionality may not be compatible if the server is running an unsupported product.') } const requests = [{ diff --git a/test/benchmarks/suite.js b/test/benchmarks/suite.js index 80f7b8148..6305bacbe 100644 --- a/test/benchmarks/suite.js +++ b/test/benchmarks/suite.js @@ -192,8 +192,8 @@ function buildBenchmark (options = {}) { const git = Git(__dirname) const commit = await git.log(['-1']) const branch = await git.revparse(['--abbrev-ref', 'HEAD']) - const { body: osInfo } = await client.info() - const { body: osNodes } = await client.nodes.stats({ metric: 'os' }) + const { body: opensearchInfo } = await client.info() + const { body: opensearchNodes } = await client.nodes.stats({ metric: 'os' }) const results = reports.map(report => { return { @@ -230,8 +230,8 @@ function buildBenchmark (options = {}) { } }, server: { - version: osInfo.version.number, - nodes_info: osNodes + version: opensearchInfo.version.number, + nodes_info: opensearchNodes } } }) diff --git a/test/fixtures/stackoverflow.ndjson b/test/fixtures/stackoverflow.ndjson index bb6883bc0..eb1592d29 100644 --- a/test/fixtures/stackoverflow.ndjson +++ b/test/fixtures/stackoverflow.ndjson @@ -328,7 +328,7 @@ {"id":"22054301","title":"How to properly index MongoDB queries with multiple $and and $or statements","body":"\u003cp\u003eI have a collection in MongoDB (app_logins) that hold documents with the following structure:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_id\" : \"c8535f1bd2404589be419d0123a569de\"\n \"app\" : \"MyAppName\",\n \"start\" : ISODate(\"2014-02-26T14:00:03.754Z\"),\n \"end\" : ISODate(\"2014-02-26T15:11:45.558Z\")\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSince the documentation says that the queries in an $or can be executed in parallel and can use separate indices, and I assume the same holds true for $and, I added the following 
indices:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edb.app_logins.ensureIndex({app:1})\ndb.app_logins.ensureIndex({start:1})\ndb.app_logins.ensureIndex({end:1})\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut when I do a query like this, way too many documents are scanned:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edb.app_logins.find(\n{\n $and:[\n { app : \"MyAppName\" },\n {\n $or:[\n {\n $and:[\n { start : { $gte:new Date(1393425621000) }},\n { start : { $lte:new Date(1393425639875) }}\n ]\n },\n {\n $and:[\n { end : { $gte:new Date(1393425621000) }},\n { end : { $lte:new Date(1393425639875) }}\n ]\n },\n {\n $and:[\n { start : { $lte:new Date(1393425639875) }},\n { end : { $gte:new Date(1393425621000) }}\n ]\n }\n ]\n }\n ]\n}\n).explain()\n\n{\n \"cursor\" : \"BtreeCursor app_1\",\n \"isMultiKey\" : true,\n \"n\" : 138,\n \"nscannedObjects\" : 10716598,\n \"nscanned\" : 10716598,\n \"nscannedObjectsAllPlans\" : 10716598,\n \"nscannedAllPlans\" : 10716598,\n \"scanAndOrder\" : false,\n \"indexOnly\" : false,\n \"nYields\" : 30658,\n \"nChunkSkips\" : 0,\n \"millis\" : 38330,\n \"indexBounds\" : {\n \"app\" : [\n [\n \"MyAppName\",\n \"MyAppName\"\n ]\n ]\n },\n \"server\" : \"127.0.0.1:27017\"\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI know that this can be caused because 10716598 match the 'app' field, but the other query can return a much smaller subset.\u003c/p\u003e\n\n\u003cp\u003eIs there any way I can optimize this? The aggregation framework comes to mind, but I was thinking that there may be a better way to optimize this, possibly using indexes.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEdit:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eLooks like if I add an index on app-start-end, as Josh suggested, I am getting better results. 
I am not sure if I can optimize this further this way, but the results are much better:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"cursor\" : \"BtreeCursor app_1_start_1_end_1\",\n \"isMultiKey\" : false,\n \"n\" : 138,\n \"nscannedObjects\" : 138,\n \"nscanned\" : 8279154,\n \"nscannedObjectsAllPlans\" : 138,\n \"nscannedAllPlans\" : 8279154,\n \"scanAndOrder\" : false,\n \"indexOnly\" : false,\n \"nYields\" : 2934,\n \"nChunkSkips\" : 0,\n \"millis\" : 13539,\n \"indexBounds\" : {\n \"app\" : [\n [\n \"MyAppName\",\n \"MyAppName\"\n ]\n ],\n \"start\" : [\n [\n {\n \"$minElement\" : 1\n },\n {\n \"$maxElement\" : 1\n }\n ]\n ],\n \"end\" : [\n [\n {\n \"$minElement\" : 1\n },\n {\n \"$maxElement\" : 1\n }\n ]\n ]\n },\n \"server\" : \"127.0.0.1:27017\"\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"22054871","answer_count":"2","comment_count":"3","creation_date":"2014-02-26 21:59:35.957 UTC","last_activity_date":"2014-02-27 03:47:40.007 UTC","last_edit_date":"2014-02-26 22:17:31.547 UTC","last_editor_display_name":"","last_editor_user_id":"333918","owner_display_name":"","owner_user_id":"333918","post_type_id":"1","score":"0","tags":"mongodb","view_count":"105"} {"id":"45313360","title":"getDisplayName() and getPhotoUrl() without signing in user to android firebase","body":"\u003cp\u003eI am creating a login page in android using Firebase Email Password Authentication and I want when a user enters its Email address and shift to password the system automatically get the PhotoUrl and DisplayName and display on the Login page Before a user enters His Full Password.\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2017-07-25 20:58:38.357 UTC","last_activity_date":"2017-07-25 22:02:32.61 UTC","last_edit_date":"2017-07-25 21:56:59.87 
UTC","last_editor_display_name":"","last_editor_user_id":"5246885","owner_display_name":"","owner_user_id":"8366127","post_type_id":"1","score":"0","tags":"android|firebase|firebase-authentication|firebase-storage","view_count":"119"} {"id":"44604596","title":"Need to open permission to 766 to let PHP to edit files","body":"\u003cp\u003eAs the title described, I have tried making the file which I need to edit permission to 764 and it didn't work.\u003cbr\u003e\nI don't have permission to the php config and other main configuration, do I have any options other than using 766?\nAdditionally, will 766 let other to edit my file over HTTP?\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2017-06-17 12:22:37.48 UTC","last_activity_date":"2017-06-17 13:47:27.22 UTC","last_edit_date":"2017-06-17 13:47:27.22 UTC","last_editor_display_name":"","last_editor_user_id":"7901773","owner_display_name":"","owner_user_id":"7901773","post_type_id":"1","score":"1","tags":"php|apache|file-permissions","view_count":"43"} -{"id":"34293411","title":"OpenSearchparent - child mapping: Search in both and highlight","body":"\u003cp\u003eI have the following opensearch 1.6.2 index mappings: parent \u003cstrong\u003eitem\u003c/strong\u003e and child \u003cstrong\u003edocument\u003c/strong\u003e. One item can have several documents. 
Documents are \u003cstrong\u003enot\u003c/strong\u003e nested because they contain base64 data (mapper-attachments-plugin) and cannot be updated with an item.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"mappings\" : {\n \"document\" : {\n \"_parent\" : {\n \"type\" : \"item\"\n }, \n \"_routing\" : {\n \"required\" : true\n },\n \"properties\" : {\n \"extension\" : {\n \"type\" : \"string\",\n \"term_vector\" : \"with_positions_offsets\", \n \"include_in_all\" : true\n }, ...\n },\n }\n \"item\" : { \n \"properties\" : {\n \"prop1\" : {\n \"type\" : \"string\",\n \"include_in_all\" : true\n }, ...\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI like to search in \u003cstrong\u003eboth\u003c/strong\u003e indices but always return \u003cstrong\u003eitems\u003c/strong\u003e. If there is a match in an document, return the corresponding item. If there is a match in an item, return the item. If both is true, return the item. \u003c/p\u003e\n\n\u003cp\u003eIs it possible to combine \u003cstrong\u003ehas_child\u003c/strong\u003e and \u003cstrong\u003ehas_parent\u003c/strong\u003e searches?\u003c/p\u003e\n\n\u003cp\u003eThis search only searches in documents and returns items:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": {\n \"has_child\": {\n \"type\": \"document\",\n \"query\": {\n \"query_string\":{\"query\":\"her*}\n },\n \"inner_hits\" : {\n \"highlight\" : {\n \"fields\" : {\n \"*\" : {} \n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eEXAMPLE\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eGET index/item/174\n{\n \"_type\" : \"item\",\n \"_id\" : \"174\",\n \"_source\":{\"prop1\":\"Perjeta construction\"}\n}\n\nGET index/document/116\n{\n \"_type\" : \"document\",\n \"_id\" : \"116\", \n \"_source\":{\"extension\":\"pdf\",\"item\": {\"id\":174},\"fileName\":\"construction plan\"}\n} \n\n__POSSIBLE SEARCH RESULT searching for \"constr*\"__\n\n{\n\"hits\": {\n \"total\": 1,\n 
\"hits\": [\n {\n \"_type\": \"item\",\n \"_id\": \"174\",\n \"_source\": {\n \"prop1\": \"Perjeta construction\"\n },\n \"highlight\": {\n \"prop1\": [\n \"Perjeta \u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt;\"\n ]\n },\n \"inner_hits\": {\n \"document\": {\n \"hits\": {\n \"hits\": [\n {\n \"_type\": \"document\",\n \"_id\": \"116\",\n \"_source\": {\n \"extension\": \"pdf\",\n \"item\": {\n \"id\": 174\n }, \n \"fileName\": \"construction plan\"\n },\n \"highlight\": {\n \"fileName\": [\n \"\u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt; plan\"\n ]\n }\n }\n ]\n }\n }\n }\n }\n ]\n}\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"35230048","answer_count":"1","comment_count":"1","creation_date":"2015-12-15 15:44:25.833 UTC","last_activity_date":"2016-02-05 17:28:08.867 UTC","last_edit_date":"2015-12-15 16:12:35.24 UTC","last_editor_display_name":"","last_editor_user_id":"1056504","owner_display_name":"","owner_user_id":"1056504","post_type_id":"1","score":"0","tags":"opensearch","view_count":"181"} +{"id":"34293411","title":"OpenSearch parent - child mapping: Search in both and highlight","body":"\u003cp\u003eI have the following opensearch 1.6.2 index mappings: parent \u003cstrong\u003eitem\u003c/strong\u003e and child \u003cstrong\u003edocument\u003c/strong\u003e. One item can have several documents. 
Documents are \u003cstrong\u003enot\u003c/strong\u003e nested because they contain base64 data (mapper-attachments-plugin) and cannot be updated with an item.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"mappings\" : {\n \"document\" : {\n \"_parent\" : {\n \"type\" : \"item\"\n }, \n \"_routing\" : {\n \"required\" : true\n },\n \"properties\" : {\n \"extension\" : {\n \"type\" : \"string\",\n \"term_vector\" : \"with_positions_offsets\", \n \"include_in_all\" : true\n }, ...\n },\n }\n \"item\" : { \n \"properties\" : {\n \"prop1\" : {\n \"type\" : \"string\",\n \"include_in_all\" : true\n }, ...\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI like to search in \u003cstrong\u003eboth\u003c/strong\u003e indices but always return \u003cstrong\u003eitems\u003c/strong\u003e. If there is a match in an document, return the corresponding item. If there is a match in an item, return the item. If both is true, return the item. \u003c/p\u003e\n\n\u003cp\u003eIs it possible to combine \u003cstrong\u003ehas_child\u003c/strong\u003e and \u003cstrong\u003ehas_parent\u003c/strong\u003e searches?\u003c/p\u003e\n\n\u003cp\u003eThis search only searches in documents and returns items:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": {\n \"has_child\": {\n \"type\": \"document\",\n \"query\": {\n \"query_string\":{\"query\":\"her*}\n },\n \"inner_hits\" : {\n \"highlight\" : {\n \"fields\" : {\n \"*\" : {} \n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eEXAMPLE\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eGET index/item/174\n{\n \"_type\" : \"item\",\n \"_id\" : \"174\",\n \"_source\":{\"prop1\":\"Perjeta construction\"}\n}\n\nGET index/document/116\n{\n \"_type\" : \"document\",\n \"_id\" : \"116\", \n \"_source\":{\"extension\":\"pdf\",\"item\": {\"id\":174},\"fileName\":\"construction plan\"}\n} \n\n__POSSIBLE SEARCH RESULT searching for \"constr*\"__\n\n{\n\"hits\": {\n \"total\": 1,\n 
\"hits\": [\n {\n \"_type\": \"item\",\n \"_id\": \"174\",\n \"_source\": {\n \"prop1\": \"Perjeta construction\"\n },\n \"highlight\": {\n \"prop1\": [\n \"Perjeta \u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt;\"\n ]\n },\n \"inner_hits\": {\n \"document\": {\n \"hits\": {\n \"hits\": [\n {\n \"_type\": \"document\",\n \"_id\": \"116\",\n \"_source\": {\n \"extension\": \"pdf\",\n \"item\": {\n \"id\": 174\n }, \n \"fileName\": \"construction plan\"\n },\n \"highlight\": {\n \"fileName\": [\n \"\u0026lt;em\u0026gt;construction\u0026lt;\\/em\u0026gt; plan\"\n ]\n }\n }\n ]\n }\n }\n }\n }\n ]\n}\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"35230048","answer_count":"1","comment_count":"1","creation_date":"2015-12-15 15:44:25.833 UTC","last_activity_date":"2016-02-05 17:28:08.867 UTC","last_edit_date":"2015-12-15 16:12:35.24 UTC","last_editor_display_name":"","last_editor_user_id":"1056504","owner_display_name":"","owner_user_id":"1056504","post_type_id":"1","score":"0","tags":"opensearch","view_count":"181"} {"id":"12093896","title":"Taking a picture and then emailing it","body":"\u003cp\u003eI am trying to create an application where you can take a picture and then email it to someone. 
At the moment I can take a picture and set my background as this picture:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class Camera extends Activity implements View.OnClickListener{\n\n\nImageButton ib;\nButton b;\nImageView iv;\nIntent i;\nfinal static int cameraData = 0;\nBitmap bmp;\n\n@Override\nprotected void onCreate(Bundle savedInstanceState) {\n // TODO Auto-generated method stub\n super.onCreate(savedInstanceState);\n setContentView(R.layout.photo);\n initialize();\n InputStream is = getResources().openRawResource(R.drawable.ic_launcher);\n bmp = BitmapFactory.decodeStream(is);\n}\n\nprivate void initialize(){\n ib = (ImageButton) findViewById(R.id.ibTakePic);\n b = (Button) findViewById(R.id.bSetWall);\n iv = (ImageView) findViewById(R.id.ivReturnedPic);\n b.setOnClickListener(this);\n ib.setOnClickListener(this);\n\n\n\n}\n\n@Override\npublic void onClick(View v) {\n File mImageFile;\n // TODO Auto-generated method stub\n switch(v.getId()){\n case R.id.bSetWall:\n try {\n getApplicationContext().setWallpaper(bmp);\n } catch (IOException e) {\n // TODO Auto-generated catch block\n e.printStackTrace();\n }\n\n break;\n case R.id.ibTakePic:\n i = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);\n startActivityForResult(i, cameraData);\n break;\n }\n\n}\n\n@Override\nprotected void onActivityResult(int requestCode, int resultCode, Intent data) {\n // TODO Auto-generated method stub\n super.onActivityResult(requestCode, resultCode, data);\n if(resultCode == RESULT_OK){\n Bundle extras = data.getExtras();\n bmp = (Bitmap)extras.get(\"data\");\n iv.setImageBitmap(bmp);\n }\n}\n\n\n\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI have a separate application where I can take in user input and email it to a predefined address:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e public void onClick(View v) {\n // TODO Auto-generated method stub\n convertEditTextVarsIntoStringsAndYesThisIsAMethodWeCreated();\n String emailaddress[] = { 
\"info@sklep.com\", \"\", };\n String message = emailAdd + name + beginning;\n\n Intent emailIntent = new Intent(android.content.Intent.ACTION_SEND);\n emailIntent.putExtra(android.content.Intent.EXTRA_EMAIL, emailaddress);\n\n emailIntent.setType(\"plain/text\");\n emailIntent.putExtra(android.content.Intent.EXTRA_TEXT, message);\n startActivity(emailIntent);\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow do I go about emailing the picture that I have taken? Where is it saved and how do I access it so that I can email it?\u003c/p\u003e\n\n\u003cp\u003eMany Thanks\u003c/p\u003e","accepted_answer_id":"12094137","answer_count":"2","comment_count":"0","creation_date":"2012-08-23 14:25:13.293 UTC","last_activity_date":"2013-11-08 05:45:17.12 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"876343","post_type_id":"1","score":"0","tags":"android|android-intent|android-camera","view_count":"143"} {"id":"29457499","title":"In open cv, how can i convert gray scale image back in to RGB image(color)","body":"\u003cp\u003eIn open cv to remove background, using current frame and former frame, i applied absdiff function and created a difference image in gray scale. However, i would like to covert the gray scale image back in to RGB with actual color of the image, but i have no idea how to operate this back in.\nI'm using C++.\nCould any one knowledgeable of open cv help me?\u003c/p\u003e","answer_count":"1","comment_count":"5","creation_date":"2015-04-05 12:55:45.5 UTC","last_activity_date":"2015-04-05 15:25:41.797 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3455085","post_type_id":"1","score":"0","tags":"c++|image|opencv","view_count":"242"} {"id":"10008551","title":"How to write test cases for assignment","body":"\u003cp\u003eThe part of my assignment is to create tests for each function. This ones kinda long but I am so confused. 
I put a link below this function so you can see how it looks like\u003cbr\u003e\nfirst code is extremely long because.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edef load_profiles(profiles_file, person_to_friends, person_to_networks):\n '''(file, dict of {str : list of strs}, dict of {str : list of strs}) -\u0026gt; NoneType\n Update person to friends and person to networks dictionaries to include\n the data in open file.'''\n\n # for updating person_to_friends dict\n update_p_to_f(profiles_file, person_to_friends)\n update_p_to_n(profiles_file, person_to_networks)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eheres the whole code: \u003ca href=\"http://shrib.com/8EF4E8Z3\" rel=\"nofollow\"\u003ehttp://shrib.com/8EF4E8Z3\u003c/a\u003e, I tested it through mainblock and it works. \nThis is the text file(profiles_file) we were provided that we are using to convert them :\n\u003ca href=\"http://shrib.com/zI61fmNP\" rel=\"nofollow\"\u003ehttp://shrib.com/zI61fmNP\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eHow do I run test cases for this through nose, what kinda of test outcomes are there? Or am I not being specific enough? \u003c/p\u003e\n\n\u003cp\u003eimport nose\nimport a3_functions\u003c/p\u003e\n\n\u003cp\u003edef test_load_profiles_\u003c/p\u003e\n\n\u003cp\u003eif \u003cstrong\u003ename\u003c/strong\u003e == '\u003cstrong\u003emain\u003c/strong\u003e':\n nose.runmodule()\nI went that far then I didn't know what I can test for the function. 
\u003c/p\u003e","answer_count":"2","comment_count":"8","creation_date":"2012-04-04 09:36:30.373 UTC","last_activity_date":"2012-10-03 05:32:10.363 UTC","last_edit_date":"2012-10-03 05:32:10.363 UTC","last_editor_display_name":"","last_editor_user_id":"1118932","owner_display_name":"","owner_user_id":"1172182","post_type_id":"1","score":"-2","tags":"python|testing","view_count":"544"} @@ -1580,7 +1580,7 @@ {"id":"23553061","title":"How to call a route by its name from inside a handler?","body":"\u003cp\u003eHow do I properly refer to route names from inside handlers?\u003cbr\u003e\nShould \u003ccode\u003emux.NewRouter()\u003c/code\u003e be assigned globally instead of standing inside a function?\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunc AnotherHandler(writer http.ResponseWriter, req *http.Request) {\n url, _ := r.Get(\"home\") // I suppose this 'r' should refer to the router\n http.Redirect(writer, req, url, 302)\n}\n\nfunc main() {\n r := mux.NewRouter()\n r.HandleFunc(\"/\", HomeHandler).Name(\"home\")\n r.HandleFunc(\"/nothome/\", AnotherHandler).Name(\"another\")\n http.Handle(\"/\", r)\n http.ListenAndServe(\":8000\", nil)\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"23554021","answer_count":"1","comment_count":"0","creation_date":"2014-05-08 21:41:03.08 UTC","favorite_count":"1","last_activity_date":"2014-05-08 23:00:14.37 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1606248","post_type_id":"1","score":"2","tags":"go|mux|gorilla","view_count":"1434"} {"id":"24844786","title":"How to use post from HttpPost in 4.4.2?","body":"\u003cp\u003eI'm working for hours on this problem but got totally stucked. The following code is running perfectly on my test device with Android 4.1.2, but I can't get it to work with the second device\nrunning 4.4.2. 
I read moving from \u003ccode\u003eApache httpClient\u003c/code\u003e to \u003ccode\u003eHttpURLConnection\u003c/code\u003e solves the problem for some people, but I am also using the \u003ccode\u003eGET\u003c/code\u003e-method wich works fine. I already tried to add headers, as mentioned on sites I found. That did not work. So can you please help me in getting this working on Android 4.4.2 or give me a hint to the right direction? Thanks.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e List\u0026lt;NameValuePair\u0026gt; params = new ArrayList\u0026lt;NameValuePair\u0026gt;();\n.\n.\n.\n DefaultHttpClient httpClient = new DefaultHttpClient();\n HttpPost httpPost = new HttpPost(url); \n httpPost.setEntity(new UrlEncodedFormEntity(params));\n HttpResponse httpResponse = httpClient.execute(httpPost);\n HttpEntity httpEntity = httpResponse.getEntity();\n is = httpEntity.getContent();\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"5","creation_date":"2014-07-19 20:40:39.657 UTC","last_activity_date":"2014-08-19 04:21:09.687 UTC","last_edit_date":"2014-07-19 20:52:39.177 UTC","last_editor_display_name":"","last_editor_user_id":"1567835","owner_display_name":"","owner_user_id":"3856650","post_type_id":"1","score":"0","tags":"json|apache|httpurlconnection|android-4.4-kitkat","view_count":"494"} {"id":"9298909","title":"Exception thrown while logging in to project in windows 7 but fine with XP","body":"\u003cp\u003eI'm a final year student learning and trying to build a Java project and I've got a project in Java and MSAccess (jdbc). Th project is basically done but only executing fine under windows XP \u0026amp; jdk1.5. But I use windows7 64bit OS and installed jdk1.7. But I'm not able to login to the project. I've done those odbc - system dsn creation procedure both by *.mdb in Access02-03 \u0026amp; in \u003cem\u003e.mdb,\u003c/em\u003e.accdb. but having the same Exception \"unable to connect to the database\". 
The login gui is taking the value of UserName and password, but as I press the login button it's throwing the exception. It is created in netbeans, though I have the latest version of netbeans installed in my system.It is throwing the same Exception from the commandline as well from netbeans. I have checked all those codings, dsn name, tablenames, ield names, but we all know that Java is a completely platform independent language. So I think there won't be any issues with the version of OS or JDK installed on the system.\u003c/p\u003e","answer_count":"0","comment_count":"3","creation_date":"2012-02-15 18:14:32.923 UTC","last_activity_date":"2012-02-16 10:52:02.097 UTC","last_edit_date":"2012-02-16 10:52:02.097 UTC","last_editor_display_name":"","last_editor_user_id":"21234","owner_display_name":"","owner_user_id":"1211998","post_type_id":"1","score":"0","tags":"java|windows|ms-access","view_count":"35"} -{"id":"29514583","title":"FInd a document several times","body":"\u003cp\u003eI have a list of events in a city, and I display for the city all incoming events, sorted by the start date of the event.\u003c/p\u003e\n\n\u003cp\u003eToday I need to add a new feature : some events can be repeated over time, for exemple a flea all wednesday and friday during 2 month. In this case, I'll need to display this event X times.\u003c/p\u003e\n\n\u003cp\u003eE.g. 
what should be displayed on the timeline :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eThe flea - today 2015-04-08 - id = 42\nJustin Bieber concert - today 2015-04-08 - id = 43\nAn other concert - thursday 2015-04-09 - id = 44\nThe flea - friday 2015-04-10 - id = 42\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe problem I have is that today, each document in OpenSearchhas the same \u003ccode\u003e_id\u003c/code\u003e than the one in MySQL.\u003c/p\u003e\n\n\u003cp\u003eI know i could stop using \u003ccode\u003e_id\u003c/code\u003e and add a \u003ccode\u003eidEvent\u003c/code\u003e field in the mapping, but this whould change a lot of things in the programm. Is there an elegant way to handle this problem ?\u003c/p\u003e\n\n\u003cp\u003eEdit :\nHere is a sample of my mapping :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd I wonder if with something like that I would be able to display several times the event in results, according to its \u003ccode\u003edateStart\u003c/code\u003e :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dates\": {\n \"type\": \"nested\",\n \"properties\": {\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eRegards,\u003c/p\u003e","answer_count":"0","comment_count":"5","creation_date":"2015-04-08 12:26:36.48 UTC","last_activity_date":"2015-04-08 13:04:57.743 UTC","last_edit_date":"2015-04-08 13:04:57.743 
UTC","last_editor_display_name":"","last_editor_user_id":"1219184","owner_display_name":"","owner_user_id":"1219184","post_type_id":"1","score":"0","tags":"opensearch","view_count":"58"} +{"id":"29514583","title":"FInd a document several times","body":"\u003cp\u003eI have a list of events in a city, and I display for the city all incoming events, sorted by the start date of the event.\u003c/p\u003e\n\n\u003cp\u003eToday I need to add a new feature : some events can be repeated over time, for exemple a flea all wednesday and friday during 2 month. In this case, I'll need to display this event X times.\u003c/p\u003e\n\n\u003cp\u003eE.g. what should be displayed on the timeline :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eThe flea - today 2015-04-08 - id = 42\nJustin Bieber concert - today 2015-04-08 - id = 43\nAn other concert - thursday 2015-04-09 - id = 44\nThe flea - friday 2015-04-10 - id = 42\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe problem I have is that today, each document in OpenSearch has the same \u003ccode\u003e_id\u003c/code\u003e than the one in MySQL.\u003c/p\u003e\n\n\u003cp\u003eI know i could stop using \u003ccode\u003e_id\u003c/code\u003e and add a \u003ccode\u003eidEvent\u003c/code\u003e field in the mapping, but this whould change a lot of things in the programm. 
Is there an elegant way to handle this problem ?\u003c/p\u003e\n\n\u003cp\u003eEdit :\nHere is a sample of my mapping :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd I wonder if with something like that I would be able to display several times the event in results, according to its \u003ccode\u003edateStart\u003c/code\u003e :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"event\": {\n \"properties\": {\n \"title\": {\n \"type\": \"string\"\n },\n \"dates\": {\n \"type\": \"nested\",\n \"properties\": {\n \"dateStart\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n },\n \"dateEnd\": {\n \"type\": \"date\",\n \"format\": \"yyyy-MM-dd\"\n }\n }\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eRegards,\u003c/p\u003e","answer_count":"0","comment_count":"5","creation_date":"2015-04-08 12:26:36.48 UTC","last_activity_date":"2015-04-08 13:04:57.743 UTC","last_edit_date":"2015-04-08 13:04:57.743 UTC","last_editor_display_name":"","last_editor_user_id":"1219184","owner_display_name":"","owner_user_id":"1219184","post_type_id":"1","score":"0","tags":"opensearch","view_count":"58"} {"id":"20516073","title":"Sprintf of MAC address of available networks","body":"\u003cp\u003eI want to sprintf Mac address of some found networks in this area like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e `WiFi connection settings:\n MAC: 00 1E C0 10 3B 19\n SSID: css`\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003emy code is :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003echar buf[32];\nBYTE MAC[64];\nint i;\n\nfor(i=1;i\u0026lt;15;i++)\n{ \n MyScanResults = WFScanList(i);\n sprintf(buf,\"%s\", MyScanResults.ssid);\n sprintf(\u0026amp;MAC[i*2],\"%02x\", 
MyScanResults.bssid[i]);\n _dbgwrite(\"SSID: \");\n _dbgwrite(buf);\n _dbgwrite(\"\\n\");\n _dbgwrite(\"MAC: \");\n _dbgwrite(MAC);\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand Errors are :\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eC:\\Users\\h\\Desktop\\WiFi test\\taskFlyport.c:22: warning: pointer targets in passing argument 1 of 'sprintf' differ in signedness \u0026lt;\u003c/p\u003e\n \n \u003cp\u003eC:\\Users\\h\\Desktop\\WiFi test\\taskFlyport.c:27: warning: pointer targets in passing argument 1 of '_dbgwrite' differ in signedness\u0026lt;\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eis there anyone to tell me where is my problem?\nthanks,regards\u003c/p\u003e","accepted_answer_id":"20516170","answer_count":"2","comment_count":"0","creation_date":"2013-12-11 10:02:51.133 UTC","last_activity_date":"2013-12-12 11:13:14.083 UTC","last_edit_date":"2013-12-11 10:19:09.09 UTC","last_editor_display_name":"","last_editor_user_id":"1859443","owner_display_name":"","owner_user_id":"2426420","post_type_id":"1","score":"1","tags":"c|printf|mac-address","view_count":"1586"} {"id":"43958478","title":"Images not rendering in Phabricator","body":"\u003cp\u003eI have phabricator installed on an EC2 instance. I have configured the application to point our CloudFront domain name. I also set up the s3 region, bucket-name and endpoint. However, I am unable to see the images after uploading through phabricator. In the inspect console, I am seeing a 403 Forbidden error to the path of the file in cloudfront. 
I am unable to verify if the file was uploaded into my s3 due to the path not being the s3 path.\u003c/p\u003e\n\n\u003cp\u003ePlease advise.\u003c/p\u003e","answer_count":"0","comment_count":"0","creation_date":"2017-05-13 22:14:57.337 UTC","last_activity_date":"2017-05-13 22:14:57.337 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4400697","post_type_id":"1","score":"0","tags":"amazon-s3|amazon-cloudfront|phabricator","view_count":"19"} {"id":"35695775","title":"Dropzone, how to not process queue if errors exist","body":"\u003cp\u003eSo I have a form with Dropzone, plus another textarea, which I want to submit - if I insert an oversize file or too many I get the \"oversize\" error in the preview container, etc. BUT the form continues to process upon button clicking the form submit (due to my listener). How can I only submit if there file size is correct for both files and doesn't exceed max file limit? I can't see a Dropzone event for say \"no errors\" to add a click event listener - I think I'm close but semi stuck now, I have the below:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$(function() {\n\nvar minImageWidth = 300, minImageHeight = 300;\n\nDropzone.options.jobApplicationUpload = {\n autoProcessQueue: false,\n addRemoveLinks: true,\n uploadMultiple: true,\n paramName: 'file',\n previewsContainer: '.dropzone-previews',\n acceptedFiles: '.pdf, .doc, .docx',\n maxFiles: 2,\n maxFilesize: 2, // MB \n dictDefaultMessage: '',\n clickable: '.fileinput-button',\n\n accept: function(file, done) { \n\n done();\n },\n\n // The setting up of the dropzone \n init: function() {\n var myDropzone = this; \n\n // First change the button to actually tell Dropzone to process the queue.\n this.element.querySelector(\"button[type=submit]\").addEventListener(\"click\", function(e) {\n\n // Make sure that the form isn't actually being sent.\n if(myDropzone.files.length \u0026gt; 0) {\n\n $('#job-application-container').hide();\n 
$('#spinner-modal').modal('show');\n $('#spinner-modal p').html('\u0026lt;b\u0026gt;Sending your application,\u0026lt;/b\u0026gt; please wait...\u0026lt;/p\u0026gt;'); \n\n e.preventDefault();\n e.stopPropagation();\n myDropzone.processQueue(); \n }\n\n });\n\n this.on(\"success\", function(files, response) {\n\n\n // Gets triggered when the files have successfully been sent.\n // Redirect user or notify of success.\n\n $('#job-application-container').hide();\n console.log('okay' + response);\n localStorage['success'] = 'test';\n location.reload();\n\n }); \n\n\n\n }\n\n};\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e});\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-02-29 09:21:01.413 UTC","favorite_count":"1","last_activity_date":"2016-03-26 21:46:21.9 UTC","last_edit_date":"2016-02-29 11:39:05.273 UTC","last_editor_display_name":"","last_editor_user_id":"3820348","owner_display_name":"","owner_user_id":"3820348","post_type_id":"1","score":"2","tags":"javascript|jquery|dropzone.js","view_count":"673"} @@ -2144,7 +2144,7 @@ {"id":"13655960","title":"Eclipse/Maven/Junit : junit throws classnotfound even though the compiled class is in test-classes folder","body":"\u003cp\u003eI recently upgraded my environment from Eclipse Ganymede to Eclipse Juno. My application was using the old maven-eclipse-plugin, so I had to make changes in the .classpath and .project and .settings files so that the m2e plugin in eclipse juno gets all the information correctly. I did this by following this link - \u003ca href=\"http://blog.frankel.ch/migrating-from-m2eclipse-to-m2e\" rel=\"nofollow\"\u003ehttp://blog.frankel.ch/migrating-from-m2eclipse-to-m2e\u003c/a\u003e \u003c/p\u003e\n\n\u003cp\u003eMy application runs perfectly fine using tomcat7 and maven also works fine.\nMy issues started when I tried to run a test as junit test in eclipse. This gives me a ClassNotFoundException. 
As a side note even if I add my test-classes folder as a classpath variable in eclipse, it still has issues because then it says it cannot find the resources folder. This very same environment worked perfectly fine with the earlier eclipse, maven plugin and classpath configuration. So I don't know what has changed. \u003c/p\u003e\n\n\u003cp\u003eI am sharing with you my project structure and classpath details. Please bear with me as the question is a bit long. \u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eXXX\n\u003cul\u003e\n\u003cli\u003eDBUnit (similar to Web)\u003c/li\u003e\n\u003cli\u003eOthers (similar to Web)\u003c/li\u003e\n\u003cli\u003eWeb\n\u003cul\u003e\n\u003cli\u003esrc/main/java\u003c/li\u003e\n\u003cli\u003esrc/main/resources\u003c/li\u003e\n\u003cli\u003esrc/test/java\u003c/li\u003e\n\u003cli\u003esrc/test/resources\u003c/li\u003e\n\u003cli\u003etarget/classes\u003c/li\u003e\n\u003cli\u003etarget/test-classes\u003c/li\u003e\n\u003cli\u003e.settings\u003c/li\u003e\n\u003cli\u003e.classpath\u003c/li\u003e\n\u003cli\u003e.project\u003c/li\u003e\n\u003c/ul\u003e\u003c/li\u003e\n\u003cli\u003etarget/classes\u003c/li\u003e\n\u003cli\u003e.settings\u003c/li\u003e\n\u003cli\u003e.classpath\u003c/li\u003e\n\u003cli\u003e.project\u003c/li\u003e\n\u003c/ul\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eThe classpath entry under Web is as follows : \u003c/p\u003e\n\n\u003cpre class=\"lang-xml prettyprint-override\"\u003e\u003ccode\u003e\u0026lt;classpathentry kind=\"src\" output=\"target/classes\" path=\"src/main/java\"/\u0026gt;\n\u0026lt;classpathentry kind=\"src\" output=\"target/test-classes\" path=\"src/test/java\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"**\" kind=\"src\" output=\"target/classes\" path=\"src/main/resources\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"**\" kind=\"src\" output=\"target/test-classes\" path=\"src/test/resources\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" 
path=\"org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" path=\"org.eclipse.jdt.launching.JRE_CONTAINER\"/\u0026gt;\n\u0026lt;classpathentry kind=\"output\" path=\"target/classes\"/\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAnd the classpath entry under XXX is as follows : \u003c/p\u003e\n\n\u003cpre class=\"lang-xml prettyprint-override\"\u003e\u003ccode\u003e\u0026lt;classpathentry kind=\"src\" output=\"Others/target/classes\" path=\"Others/src/main/java\"/\u0026gt;\n\u0026lt;classpathentry kind=\"src\" path=\"DBUnit/src/main/java\"/\u0026gt;\n\u0026lt;classpathentry kind=\"src\" path=\"Web/src/main/java\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"mock/\" kind=\"src\" output=\"Web/target/test-classes\" path=\"Web/src/test/java\"/\u0026gt;\n\u0026lt;classpathentry excluding=\"**\" kind=\"src\" output=\"Web/target/classes\" path=\"Web/src/main/resources\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" path=\"org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6\"/\u0026gt;\n\u0026lt;classpathentry kind=\"con\" path=\"org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/annotations-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/el-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/jasper.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/jsp-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"var\" path=\"TOMCAT_HOME/lib/servlet-api.jar\"/\u0026gt;\n\u0026lt;classpathentry kind=\"output\" path=\"target/classes\"/\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo when I clean the project eclipse does not place the main java classes under the the module1/target/classes folder and it also does not copy the resources folder under classes either.\u003c/p\u003e\n\n\u003cp\u003eI have 
searched around quite a bit regarding this problem. \u003c/p\u003e\n\n\u003cp\u003eOne solution also suggested to import the project into eclipse as a Maven project and update configuration. This splits my project into multiple modules and maven/eclipse throws me the exception - \"Path must include project and resource name\". I don't understand this error either.\u003c/p\u003e\n\n\u003cp\u003eAnother one suggested the removal of excluding=\"**\". I removed it but that did not help either.\u003c/p\u003e\n\n\u003cp\u003eIs there something wrong with the project structure? Does module1 require classpath and project files?\u003c/p\u003e\n\n\u003cp\u003ePlease help, I'll be really grateful. Thanks.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUpdate 03rd Dec 2012\u003c/strong\u003e \u003c/p\u003e\n\n\u003cp\u003eThis is the exception - \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eClass not found com.iei.gas.service.QuartzTestService\njava.lang.ClassNotFoundException: com.iei.gas.service.QuartzTestService \nat java.net.URLClassLoader$1.run(URLClassLoader.java:366)\nat java.net.URLClassLoader$1.run(URLClassLoader.java:355)\nat java.security.AccessController.doPrivileged(Native Method)\nat java.net.URLClassLoader.findClass(URLClassLoader.java:354)\nat java.lang.ClassLoader.loadClass(ClassLoader.java:423)\nat sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)\nat java.lang.ClassLoader.loadClass(ClassLoader.java:356)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.loadClass(RemoteTestRunner.java:693)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.loadClasses(RemoteTestRunner.java:429)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:452)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:683)\nat org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:390)\nat 
org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:197)\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"5","creation_date":"2012-12-01 03:07:15.967 UTC","last_activity_date":"2012-12-03 06:31:36.003 UTC","last_edit_date":"2012-12-03 06:31:36.003 UTC","last_editor_display_name":"","last_editor_user_id":"931293","owner_display_name":"","owner_user_id":"931293","post_type_id":"1","score":"1","tags":"eclipse|maven-2|junit4|m2eclipse|m2e","view_count":"1247"} {"id":"15479561","title":"Android App: Convert 3gp to mp3","body":"\u003cp\u003eMy soundfiles should be changed from 3gp to mp3.\u003c/p\u003e\n\n\u003cp\u003eI've tried to do this with ffmpeg:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003effmpeg -i input.3gp -vn -acodec libmp3lame -ab 64k output.mp3\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut the new mp3 file is only 0 KB big.\u003c/p\u003e\n\n\u003cp\u003eCould libmp3lame be the problem? \nIs it even possible to do that in Java?(since I only found c++ examples)\u003c/p\u003e","answer_count":"0","comment_count":"2","creation_date":"2013-03-18 14:42:48.477 UTC","last_activity_date":"2013-03-18 15:02:44.163 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2043332","post_type_id":"1","score":"0","tags":"java|android|eclipse|ffmpeg|mp3","view_count":"689"} {"id":"16144470","title":"Get last_insert_id with singleton pattern","body":"\u003cp\u003eI was wondering about a thing when using the singleton pattern on a database connection class. \u003c/p\u003e\n\n\u003cp\u003eAs I understand it, the singleton pattern prevents the creation of more then 1 object of a given class that uses the pattern. 
\u003c/p\u003e\n\n\u003cp\u003eLets say I need the id from a row I just inserted which I get via the \u003ccode\u003emysqli::$insert_id\u003c/code\u003e.\nWhat if another use of the connection object was used to insert a row at the same time, might that result in a chance of returning a different id then the one expected or is it certain always to return the right id?\u003c/p\u003e\n\n\u003cp\u003eSorry for the newbie question, I have just been wondering whether there were a tiny chance on a multiuser application that getting the id this way might be inconsistent.\u003c/p\u003e\n\n\u003cp\u003eThanks in advance.\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2013-04-22 10:08:43.537 UTC","last_activity_date":"2013-04-22 10:40:13.373 UTC","last_edit_date":"2013-04-22 10:40:13.373 UTC","last_editor_display_name":"","last_editor_user_id":"2269749","owner_display_name":"","owner_user_id":"649717","post_type_id":"1","score":"1","tags":"php|oop|singleton","view_count":"241"} -{"id":"19326117","title":"OpenSearchNest, parent/child relationship","body":"\u003cp\u003ecan you help me out to define a parent/child relationship using \u003ccode\u003eNESTclient\u003c/code\u003e for opensearch?\nmy code looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_properties\", DateDetection = true,.....)]\npublic class Properties{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003chr\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_sales\", DateDetection = true, , ParentType = \"type_properties\")]\npublic class SalesHistory{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI defined the parentType, but I don't see this sales documents related to a parent property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_index\": \"testparentchild\",\n \"_type\": \"type_sales\",\n \"_id\": \"dVd1tUJ0SNyoiSer7sNA\",\n \"_version\": 1,\n \"_score\": 1,\n \"_source\": {\n \"salesRecId\": 179504762,\n 
\"salesPrice\": 150000,\n \"salesDate\": \"2003-04-07T00:00:00\",\n }\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"19360482","answer_count":"1","comment_count":"0","creation_date":"2013-10-11 19:50:34.827 UTC","last_activity_date":"2016-12-09 09:58:28.047 UTC","last_edit_date":"2016-12-09 09:58:28.047 UTC","last_editor_display_name":"","last_editor_user_id":"6340959","owner_display_name":"","owner_user_id":"2824011","post_type_id":"1","score":"0","tags":"opensearch|nest","view_count":"1534"} +{"id":"19326117","title":"OpenSearch Nest, parent/child relationship","body":"\u003cp\u003ecan you help me out to define a parent/child relationship using \u003ccode\u003eNESTclient\u003c/code\u003e for opensearch?\nmy code looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_properties\", DateDetection = true,.....)]\npublic class Properties{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003chr\u003e\n\n\u003cpre\u003e\u003ccode\u003e[ElasticType(Name = \"type_sales\", DateDetection = true, , ParentType = \"type_properties\")]\npublic class SalesHistory{....}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI defined the parentType, but I don't see this sales documents related to a parent property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"_index\": \"testparentchild\",\n \"_type\": \"type_sales\",\n \"_id\": \"dVd1tUJ0SNyoiSer7sNA\",\n \"_version\": 1,\n \"_score\": 1,\n \"_source\": {\n \"salesRecId\": 179504762,\n \"salesPrice\": 150000,\n \"salesDate\": \"2003-04-07T00:00:00\",\n }\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"19360482","answer_count":"1","comment_count":"0","creation_date":"2013-10-11 19:50:34.827 UTC","last_activity_date":"2016-12-09 09:58:28.047 UTC","last_edit_date":"2016-12-09 09:58:28.047 
UTC","last_editor_display_name":"","last_editor_user_id":"6340959","owner_display_name":"","owner_user_id":"2824011","post_type_id":"1","score":"0","tags":"opensearch|nest","view_count":"1534"} {"id":"46334908","title":"Loading failed javacript file when deploy web application on weblogic","body":"\u003cp\u003ei have a problem with load javacript file on jsp page when deploy my web application on weblogic server. Before I deploy it on Tomcat 7 and it work normally.\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eFirst I see on console window of firefox. My jsp page couldn't load js file on \u003cstrong\u003e\u003cem\u003e/resources/\u003c/em\u003e\u003c/strong\u003e folder (this folder is the same level with \u003cstrong\u003e\u003cem\u003e/WEB-INF/\u003c/em\u003e\u003c/strong\u003e):\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eLoading failed for the \u003ccode\u003e\u0026lt;script\u0026gt;\u003c/code\u003e with source “http ://10.3.11.25:7001/resources/assets/global/plugins/jquery.min.js”. 10.3.11.25:7001:104\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eImage i have capture:\n\u003ca href=\"https://i.stack.imgur.com/TJSLf.png\" rel=\"nofollow noreferrer\"\u003eConsole log of browser\u003c/a\u003e\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eI try copy the url: “http ://10.3.11.25:7001/resources/assets/global/plugins/jquery.min.js” to address bar. \u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003e=\u003e I can access it, but i only can download the js file (It not display the source code on browser as normally).\u003c/p\u003e\n\n\u003cp\u003e=\u003e What is my problem. 
I deploy my web application on weblogic 12c\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUPDATE:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eNetwork tab load js file ok, all status is 200: \n\u003ca href=\"https://i.stack.imgur.com/E9qjw.png\" rel=\"nofollow noreferrer\"\u003eCapture image\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003eSource code include on jsp:\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003e\u003ccode\u003e\u0026lt;script src=\"resources/assets/global/plugins/jquery.min.js\"\n type=\"text/javascript\"\u0026gt;\u0026lt;/script\u0026gt;\n\u0026lt;script src=\"resources/assets/global/plugins/jquery-migrate.min.js\"\n type=\"text/javascript\"\u0026gt;\u0026lt;/script\u0026gt;\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eUPDATE 2:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eAll status is 200 but load O KB and response is notthing\u003c/li\u003e\n\u003cli\u003eWhen i copy the js url to address bar it show popup download it (not display the source code as normally)\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003ePS: Sorry i can post more than 2 picture.\u003c/p\u003e","accepted_answer_id":"46356494","answer_count":"1","comment_count":"10","creation_date":"2017-09-21 03:17:09.437 UTC","last_activity_date":"2017-09-22 03:44:41.263 UTC","last_edit_date":"2017-09-21 03:40:25.14 UTC","last_editor_display_name":"","last_editor_user_id":"8645550","owner_display_name":"","owner_user_id":"8645550","post_type_id":"1","score":"0","tags":"javascript|deployment|weblogic12c","view_count":"70"} {"id":"16250969","title":"How to render Backbone el correctly into the view page","body":"\u003cp\u003eI'm trying to working correctly with my first \u003ccode\u003eBackbone\u003c/code\u003e app and trying to render it into my page.\u003c/p\u003e\n\n\u003cp\u003eI've wrote this app but I didn't got how I shoud put the app html rendered in the html 
view:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;script type=\"text/javascript\"\u0026gt;\n$(function(){\n var SearchApp = new Search.Views.App({\n id:\"product-name-results\"\n });\n SearchApp.render();\n});\n\u0026lt;script\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis is my app\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003evar Search = {\n Models: {},\n Collections: {},\n Views: {},\n Templates:{}\n}\n\nSearch.Views.App = Backbone.View.extend({\n initialize:function () {\n console.log('Search.Views.App initialize')\n }, \n render:function (options) {\n this.$el.html('hello world');\n }\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eObviously this render method not appending in the \u003ccode\u003ehtml view\u003c/code\u003e, but how to append it into the view?\u003c/p\u003e","accepted_answer_id":"16251013","answer_count":"1","comment_count":"0","creation_date":"2013-04-27 10:27:02.6 UTC","last_activity_date":"2013-04-27 10:40:12.98 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"185921","post_type_id":"1","score":"0","tags":"backbone.js|render|el","view_count":"89"} {"id":"4836723","title":"Trouble with inserting notes into a JTree","body":"\u003cp\u003eI have a \u003ccode\u003eJTree\u003c/code\u003e which is constructed with the following method:\u003cbr\u003e\n(The \u003ccode\u003eBKMNode\u003c/code\u003e class extends \u003ccode\u003eDefaultMutableTreeNode\u003c/code\u003e, and the\u003ccode\u003eDataNode\u003c/code\u003e simply holds the data) \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e void populateTree(BKMNode parent) {\n for (DataNode node : nodes) {\n BKMNode treeNode = new BKMNode(node.name,node.fullName,null);\n // check if this node was already added before\n if (! 
existingNodes.contains(ip + \".\" + node.fullName)) {\n existingNodes.add(ip + \".\" + node.fullName);\n DefaultTreeModel model = (DefaultTreeModel)tree.getModel();\n model.insertNodeInto(treeNode, parent, parent.getChildCount());\n System.out.println(\"adding \" + ip + \".\" + node.fullName);\n }\n node.populateTree(treeNode);\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003ccode\u003e// some more non-relevant code...\u003c/code\u003e\u003c/p\u003e\n\n\u003cp\u003eWhen the tree is created at the application startup, everything is fine.\u003cbr\u003e\nBut once in a while my application adds nodes to the tree using the same method.\u003cbr\u003e\nWhen the application attempts to add a new node to the tree in does print the text, but nothing changes on the \u003ccode\u003eGUI\u003c/code\u003e.\u003cbr\u003e\nI tried calling \u003ccode\u003eJTree.invalidate()\u003c/code\u003e, \u003ccode\u003evalidate()\u003c/code\u003e, \u003ccode\u003erepaint()\u003c/code\u003e, \u003ccode\u003ereload()\u003c/code\u003e but nothing seems to help. \u003c/p\u003e\n\n\u003cp\u003eThe \u003ccode\u003epopulateTree\u003c/code\u003e method is always called from the \u003ccode\u003eEDT\u003c/code\u003e. \u003c/p\u003e\n\n\u003cp\u003eDoes anyone know what's the problems here? \u003c/p\u003e\n\n\u003cp\u003eThanks a lot in advance! \u003c/p\u003e","answer_count":"2","comment_count":"8","creation_date":"2011-01-29 11:59:40.373 UTC","favorite_count":"0","last_activity_date":"2014-06-25 11:28:14.557 UTC","last_edit_date":"2014-06-25 11:28:14.557 UTC","last_editor_display_name":"","last_editor_user_id":"3485434","owner_display_name":"","owner_user_id":"594926","post_type_id":"1","score":"0","tags":"java|swing|jtree","view_count":"1088"} @@ -3088,7 +3088,7 @@ {"id":"26556288","title":"texture2d rectangle XNA wont initialize","body":"\u003cp\u003eI have a rather basic Texture2D name rect and I am just trying to initialize it. 
It tells me that a field initializer cannot reference the non-static field, method or property \"graphics\"\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e public class Game1 : Microsoft.Xna.Framework.Game\n{\n GraphicsDeviceManager graphics;\n SpriteBatch spriteBatch;\n\n //my variables and stuff I delcare\n\n //texture we can render\n Texture2D myTexture;\n\n //set coords to draw the spirte\n Vector2 spritePos = new Vector2(300.0f, 330.0f);\n\n //some info about motion\n Vector2 spriteSpeed = new Vector2(0f, 0f);\n\n KeyboardState oldState;\n double boost = 15;\n\n //boost level rectange this is the issue below+\n Texture2D rect = new Texture2D(graphics.GraphicsDevice, 80, 30);\n\n public Game1()\n {\n graphics = new GraphicsDeviceManager(this);\n Content.RootDirectory = \"Content\";\n }\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"26556335","answer_count":"1","comment_count":"1","creation_date":"2014-10-24 21:07:06.167 UTC","last_activity_date":"2014-10-24 21:11:16.023 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3908256","post_type_id":"1","score":"0","tags":"c|xna","view_count":"49"} {"id":"14032208","title":"ToList slow performance vs foreach slow performance","body":"\u003cp\u003eI am building program that use DataBase with 3 tables(Worker, Task, TaskStep)\nand i have a method that get date and build report for specific worker\nof the task and there steps for the specific day.\u003c/p\u003e\n\n\u003cp\u003eThe data base structure is as follow:\u003c/p\u003e\n\n\u003cp\u003eMySQL 5.2\u003c/p\u003e\n\n\u003cp\u003e\u003ccode\u003eWorker\u003c/code\u003e table columns:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eworkerID(VARCHAR(45)),\nname(VARCHAR(45)),\nage(int),\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003ccode\u003eTasks\u003c/code\u003e table 
columns:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTaskID(VARCHAR(45)),\ndescription(VARCHAR(45)),\ndate(DATE),\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003ccode\u003eTaskSteps\u003c/code\u003e table columns:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eTaskStepID(VARCHAR(45)),\ndescription(VARCHAR(45)),\ndate(DATE),\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNo indexing on any table\u003c/p\u003e\n\n\u003cp\u003eThe problem is thats it is very very slow!! (~ 20 seconds)\u003c/p\u003e\n\n\u003cp\u003eHere is the code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eusing WorkerDailyReport = Dictionary\u0026lt;task, IEnumerable\u0026lt;taskStep\u0026gt;\u0026gt;;\n\nprivate void Buildreport(DateTime date)\n{\n var report = new WorkerDailyReport(); \n\n // Load from DB\n var sw = new Stopwatch();\n sw.Start();\n\n var startOfDay = date.Date;\n var endOfDay = startOfDay.AddDays(1);\n var db = new WorkEntities();\n\n const string workerID = \"80900855\";\n\n IEnumerable\u0026lt;task\u0026gt; _tasks = db.task\n .Where(ta =\u0026gt; ta.date \u0026gt;= startOfDay \u0026amp;\u0026amp;\n ta.date \u0026lt; endOfDay \u0026amp;\u0026amp;\n ta.workerID == workerID)\n .ToList();\n\n sw.Stop();\n Console.WriteLine(\"Load From DB time - \" + sw.Elapsed + \n \", Count - \" + _tasks.Count()); \n\n // Build the report\n sw.Restart();\n\n foreach (var t in _tasks)\n {\n var ts = db.taskStep.Where(s =\u0026gt; s.taskID == task.taskID);\n\n report.Add(t, ts);\n }\n\n sw.Stop();\n Console.WriteLine(\"Build report time - \" + sw.Elapsed);\n\n // Do somthing with the report\n foreach (var t in report)\n {\n sw.Restart();\n\n foreach (var subNode in t.Value)\n {\n // Do somthing..\n }\n\n Console.WriteLine(\"Do somthing time - \" + sw.Elapsed + \n \", Count - \" + t.Value.Count());\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAs u can see i put StopWatch in each part to check what take so long\nand this is the 
results:\u003c/p\u003e\n\n\u003cp\u003e1)\u003c/p\u003e\n\n\u003cp\u003eIf i run the code as above:\u003c/p\u003e\n\n\u003cp\u003eConsole:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eLoad From DB time - 00:00:00.0013774, Count - 577\n\nBuild report time - 00:00:03.6305722\n\nDo somthing time - 00:00:07.7573754, Count - 21\n\nDo somthing time - 00:00:08.2811928, Count - 11\n\nDo somthing time - 00:00:07.8715531, Count - 14\n\nDo somthing time - 00:00:08.0430597, Count - 0\n\nDo somthing time - 00:00:07.7867790, Count - 9\n\nDo somthing time - 00:00:07.3485209, Count - 39\n\n.........\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe inner foreach run takes about 7-9!! Sec to run over no more then\n40 record.\u003c/p\u003e\n\n\u003cp\u003e2)\u003c/p\u003e\n\n\u003cp\u003eIf i change only one thing, Add .ToList() after the first query\nwhen i load the worker tasks from the Data Base it changes\neverithing.\u003c/p\u003e\n\n\u003cp\u003eConsole:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eLoad From DB time - 00:00:04.3568445, Count - 577\n\nBuild report time - 00:00:00.0018535\n\nDo somthing time - 00:00:00.0191099, Count - 21\n\nDo somthing time - 00:00:00.0144895, Count - 11\n\nDo somthing time - 00:00:00.0150208, Count - 14\n\nDo somthing time - 00:00:00.0179021, Count - 0\n\nDo somthing time - 00:00:00.0151372, Count - 9\n\nDo somthing time - 00:00:00.0155703, Count - 39\n\n.........\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNow the load from DataBase takes lot more time, 4+ sec.\nBut the Built report time is about ~1ms \nAnd each inner foreach takes ~10ms\u003c/p\u003e\n\n\u003cp\u003eThe first way is imposible(577 * ~8 seconds) and the seconde option \nis also very slow and i cant see y.\u003c/p\u003e\n\n\u003cp\u003eAny idea what happening here? 
\u003c/p\u003e\n\n\u003cp\u003e1) Why the \u003ccode\u003eToList()\u003c/code\u003e so slow ?\u003c/p\u003e\n\n\u003cp\u003e2) Why without the \u003ccode\u003eToList()\u003c/code\u003e, The inner \u003ccode\u003eforeach\u003c/code\u003e and the Build report is slowing?\u003c/p\u003e\n\n\u003cp\u003eHow can i make it faster?\u003c/p\u003e\n\n\u003cp\u003ethnx.\u003c/p\u003e","answer_count":"3","comment_count":"4","creation_date":"2012-12-25 16:12:49.307 UTC","last_activity_date":"2013-12-19 07:07:53.4 UTC","last_edit_date":"2013-12-19 07:07:53.4 UTC","last_editor_display_name":"","last_editor_user_id":"842218","owner_display_name":"","owner_user_id":"1365625","post_type_id":"1","score":"2","tags":"performance|linq|c#-4.0|entity-framework-4|tolist","view_count":"3396"} {"id":"16063518","title":"What does this statement mean in C#?","body":"\u003cp\u003eWhat does \u003ccode\u003eif ((a \u0026amp; b) == b)\u003c/code\u003e mean in the following code block?\u003c/p\u003e\n\n\u003cpre class=\"lang-cs prettyprint-override\"\u003e\u003ccode\u003eif ((e.Modifiers \u0026amp; Keys.Shift) == Keys.Shift)\n{\n lbl.Text += \"\\n\" + \"Shift was held down.\";\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhy is it not like this?\u003c/p\u003e\n\n\u003cpre class=\"lang-cs prettyprint-override\"\u003e\u003ccode\u003eif (e.Modifiers == Keys.Shift)\n{\n lbl.Text += \"\\n\" + \"Shift was held down.\";\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"16063748","answer_count":"8","comment_count":"2","creation_date":"2013-04-17 15:00:46.73 UTC","last_activity_date":"2013-04-18 15:03:41.293 UTC","last_edit_date":"2013-04-17 17:16:11.383 UTC","last_editor_display_name":"","last_editor_user_id":"682480","owner_display_name":"","owner_user_id":"2284963","post_type_id":"1","score":"11","tags":"c#|if-statement","view_count":"603"} -{"id":"35393765","title":"Convert log message timestamp to UTC before sroring it in OpenSearch","body":"\u003cp\u003eI am collecting and parsing 
Tomcat access-log messages using Logstash, and am storing the parsed messages in OpenSearch.\nI am using OpenSearch Dashboards to display the log messges in OpenSearch.\nCurrently I am using OpenSearch2.0.0, Logstash 2.0.0, and OpenSearch Dashboards 4.2.1.\u003c/p\u003e\n\n\u003cp\u003eAn access-log line looks something like the following:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e02-08-2016 19:49:30.669 ip=11.22.333.444 status=200 tenant=908663983 user=0a4ac75477ed42cfb37dbc4e3f51b4d2 correlationId=RID-54082b02-4955-4ce9-866a-a92058297d81 request=\"GET /pwa/rest/908663983/rms/SampleDataDeployment HTTP/1.1\" userType=Apache-HttpClient requestInfo=- duration=4 bytes=2548 thread=http-nio-8080-exec-5 service=rms itemType=SampleDataDeployment itemOperation=READ dataLayer=MongoDB incomingItemCnt=0 outgoingItemCnt=7 \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe time displayed in the log file (ex. 02-08-2016 19:49:30.669) is in local time (not UTC!)\u003c/p\u003e\n\n\u003cp\u003eHere is how I parse the message line:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efilter {\n\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n kv {}\n\n mutate {\n convert =\u0026gt; { \"duration\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"bytes\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"status\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"incomingItemCnt\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"outgoingItemCnt\" =\u0026gt; \"integer\" }\n\n gsub =\u0026gt; [ \"message\", \"\\r\", \"\" ]\n }\n\n grok {\n match =\u0026gt; { \"request\" =\u0026gt; [ \"(?:%{WORD:method} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpVersion})?)\" ] }\n overwrite =\u0026gt; [ \"request\" ] \n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI would like Logstash to convert the time read from the log message ('logTimestamp' field) into UTC before storing it in OpenSearch.\u003c/p\u003e\n\n\u003cp\u003eCan someone 
assist me with that please?\u003c/p\u003e\n\n\u003cp\u003e--\u003c/p\u003e\n\n\u003cp\u003eI have added the \u003cem\u003edate\u003c/em\u003e filter to my processing, but I had to add a \u003cem\u003etimezone\u003c/em\u003e.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e filter {\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n date {\n match =\u0026gt; [ \"logTimestamp\" , \"mm-dd-yyyy HH:mm:ss.SSS\" ]\n timezone =\u0026gt; \"Asia/Jerusalem\"\n target =\u0026gt; \"logTimestamp\"\n }\n\n ...\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to convert the date to UTC without supplying the local timezone, such that Logstash takes the timezone of the machine it is running on?\u003c/p\u003e\n\n\u003cp\u003eThe motivation behind this question is I would like to use the same configuration file in all my deployments, in various timezones.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-02-14 15:56:59.2 UTC","favorite_count":"0","last_activity_date":"2016-02-15 10:55:52.107 UTC","last_edit_date":"2016-02-15 10:55:52.107 UTC","last_editor_display_name":"","last_editor_user_id":"5524030","owner_display_name":"","owner_user_id":"5524030","post_type_id":"1","score":"1","tags":"opensearch|logstash|utc","view_count":"1032"} +{"id":"35393765","title":"Convert log message timestamp to UTC before sroring it in OpenSearch","body":"\u003cp\u003eI am collecting and parsing Tomcat access-log messages using Logstash, and am storing the parsed messages in OpenSearch.\nI am using OpenSearch Dashboards to display the log messges in OpenSearch.\nCurrently I am using OpenSearch 2.0.0, Logstash 2.0.0, and OpenSearch Dashboards 4.2.1.\u003c/p\u003e\n\n\u003cp\u003eAn access-log line looks something like the following:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e02-08-2016 19:49:30.669 ip=11.22.333.444 status=200 tenant=908663983 user=0a4ac75477ed42cfb37dbc4e3f51b4d2 
correlationId=RID-54082b02-4955-4ce9-866a-a92058297d81 request=\"GET /pwa/rest/908663983/rms/SampleDataDeployment HTTP/1.1\" userType=Apache-HttpClient requestInfo=- duration=4 bytes=2548 thread=http-nio-8080-exec-5 service=rms itemType=SampleDataDeployment itemOperation=READ dataLayer=MongoDB incomingItemCnt=0 outgoingItemCnt=7 \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe time displayed in the log file (ex. 02-08-2016 19:49:30.669) is in local time (not UTC!)\u003c/p\u003e\n\n\u003cp\u003eHere is how I parse the message line:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efilter {\n\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n kv {}\n\n mutate {\n convert =\u0026gt; { \"duration\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"bytes\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"status\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"incomingItemCnt\" =\u0026gt; \"integer\" }\n convert =\u0026gt; { \"outgoingItemCnt\" =\u0026gt; \"integer\" }\n\n gsub =\u0026gt; [ \"message\", \"\\r\", \"\" ]\n }\n\n grok {\n match =\u0026gt; { \"request\" =\u0026gt; [ \"(?:%{WORD:method} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpVersion})?)\" ] }\n overwrite =\u0026gt; [ \"request\" ] \n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI would like Logstash to convert the time read from the log message ('logTimestamp' field) into UTC before storing it in OpenSearch.\u003c/p\u003e\n\n\u003cp\u003eCan someone assist me with that please?\u003c/p\u003e\n\n\u003cp\u003e--\u003c/p\u003e\n\n\u003cp\u003eI have added the \u003cem\u003edate\u003c/em\u003e filter to my processing, but I had to add a \u003cem\u003etimezone\u003c/em\u003e.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e filter {\n grok {\n match =\u0026gt; { \"message\" =\u0026gt; \"%{DATESTAMP:logTimestamp}\\s+\" }\n }\n\n date {\n match =\u0026gt; [ \"logTimestamp\" , \"mm-dd-yyyy HH:mm:ss.SSS\" ]\n timezone =\u0026gt; \"Asia/Jerusalem\"\n 
target =\u0026gt; \"logTimestamp\"\n }\n\n ...\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to convert the date to UTC without supplying the local timezone, such that Logstash takes the timezone of the machine it is running on?\u003c/p\u003e\n\n\u003cp\u003eThe motivation behind this question is I would like to use the same configuration file in all my deployments, in various timezones.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-02-14 15:56:59.2 UTC","favorite_count":"0","last_activity_date":"2016-02-15 10:55:52.107 UTC","last_edit_date":"2016-02-15 10:55:52.107 UTC","last_editor_display_name":"","last_editor_user_id":"5524030","owner_display_name":"","owner_user_id":"5524030","post_type_id":"1","score":"1","tags":"opensearch|logstash|utc","view_count":"1032"} {"id":"3173899","title":"Toggle element visibility via radio select","body":"\u003cp\u003eThis form has a hidden textara and a visible textbox. I would like to swap visibility of these elements if option \"D:\" is selected, but not sure how to correctly check which radio button is checked at any given time: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;script language=\"JavaScript\" type=\"text/javascript\"\u0026gt;\n\nfunction unhide(event) { \n event = event || window.event ;\n target = event.target || event.srcElement; \n if(target.value === \"D:\") {\n if(target.checked) {\n document.getElementByName('tarea').style.display=''; \n document.getElementByName('tbox').style.display='none'; \n }\n }else {\n if(target.checked) {\n document.getElementByName('tarea').style.display='none'; \n document.getElementByName('tbox').style.display=''; \n }\n } \n}\n\u0026lt;/script\u0026gt;\n\u0026lt;/head\u0026gt;\n\u0026lt;body\u0026gt;\n\u0026lt;form method=\"get\" action=\"/cgi-bin/form.cgi\" enctype=\"application/x-www-form-urlencoded\"\u0026gt;\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"A:\" onclick=\"unhide(event)\" 
/\u0026gt;A:\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"B:\" onclick=\"unhide(event)\" /\u0026gt;B:\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"C:\" checked=\"checked\" onclick=\"unhide(event)\" /\u0026gt;C:\n\u0026lt;input type=\"radio\" name=\"opttype\" value=\"D:\" onclick=\"unhide(event)\" /\u0026gt;D:\n\u0026lt;br\u0026gt;\u0026lt;input type=\"tbox\" name=\"event\" /\u0026gt;\n\u0026lt;br\u0026gt;\u0026lt;textarea name=\"tarea\" rows=\"8\" cols=\"80\" style=\"width:580;height:130;display:none;\"\u0026gt;\u0026lt;/textarea\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"3174156","answer_count":"4","comment_count":"0","creation_date":"2010-07-04 06:10:30.503 UTC","last_activity_date":"2010-07-04 08:20:04.6 UTC","last_edit_date":"2010-07-04 07:36:35.403 UTC","last_editor_display_name":"","last_editor_user_id":"126562","owner_display_name":"","owner_user_id":"196096","post_type_id":"1","score":"0","tags":"javascript|onclick","view_count":"1224"} {"id":"47281777","title":"How to redirect all traffics to HTTPS only EXCEPT mobile and subdomains?","body":"\u003cp\u003eThe .htaccess file that I am using now is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e#Force www:\nRewriteEngine On\n\nRewriteCond %{HTTP_USER_AGENT} \"!(android|blackberry|googlebot-mobile|iemobile|ipad|iphone|ipod|opera mobile|palmos|webos)\" [NC]\nRewriteCond %{HTTP_HOST} ^example\\.com [NC]\nRewriteRule ^$ http://www.example.com/ [L,R=302]\n\n\nRewriteCond %{HTTPS} off [OR]\nRewriteCond %{HTTP_HOST} !^www\\. 
[NC]\nRewriteCond %{HTTP_HOST} ^(?:www\\.)?(.+)$ [NC]\nRewriteRule ^ https://www.%1%{REQUEST_URI} [L,NE,R=301]\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever, when I tested using my mobile I found that it is redirecting to the https only and also while accessing any subdomains it redirects to the https.\u003c/p\u003e\n\n\u003cp\u003eAm I doing something wrong in my htaccess script?\u003c/p\u003e\n\n\u003cp\u003eThanks in advance.\u003c/p\u003e","answer_count":"0","comment_count":"1","creation_date":"2017-11-14 09:15:56.563 UTC","last_activity_date":"2017-11-14 09:15:56.563 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3243499","post_type_id":"1","score":"0","tags":"apache|.htaccess|redirect|mod-rewrite","view_count":"16"} {"id":"44615289","title":"Netty ByteBuf processing, decoders structure in the pipeline","body":"\u003cp\u003eMy server sends response to the client or forward the message to another client depends on message content.\nI need to use 8 bytes messages: 6 encrypted bytes between braces, for example: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e0x3C 0xE1 0xE2 0xE3 0xE04 0xE5 0xE6 0x3E\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhere 0x3C means \u0026lt; sign as an opening frame marker, and 0x3E means \u003e sign as closing frame marker. \u003c/p\u003e\n\n\u003cp\u003eIf internal 6 encrypted bytes (0xE1 0x02 0x03 0x04 0x05 0x06) are decrypted successfully, data contains same markers again:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e0x3C 0x3C 0x02 0x03 0x04 0x05 0x3E 0x3E\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eSo I get 4 bytes payload (0x02 0x03 0x04 0x05).\u003c/p\u003e\n\n\u003cp\u003eI have already written a FrameDecoder, but now I can't decide to strip the braces bytes or not:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003eI want to write clean code, braces are only frame markers so they belong to FrameDecoder responsibility. This means for me FrameDecoder needs to strip them. 
But on forwarding, FrameEncoder needs to add them again (on reponse encoding too). I can simply write the closing marker into the buffer but I don't know how can I write single byte to the beginning of Bytebuf efficiently.\u003c/li\u003e\n\u003cli\u003eIf I do not strip markers, it looks not so clean solution, but I can forward the entire received Bytebuf (after encryption) or last handler can allocate 8 bytes for the entire Bytebuf on reponse sending.\u003c/li\u003e\n\u003c/ul\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-06-18 13:02:58.413 UTC","last_activity_date":"2017-06-20 07:23:34.603 UTC","last_edit_date":"2017-06-18 13:19:12.1 UTC","last_editor_display_name":"","last_editor_user_id":"7973330","owner_display_name":"","owner_user_id":"7973330","post_type_id":"1","score":"0","tags":"java|netty","view_count":"49"} @@ -3178,7 +3178,7 @@ {"id":"15118380","title":"Image not changing in android app","body":"\u003cp\u003eI'm trying to change an image resource with .setImageResource(identifier) but it is not showing up when i'm using the variables i'm using right now. 
It will work when i fill in the name of the image by myself.\u003c/p\u003e\n\n\u003cp\u003eHere is the Index.java file:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e package com.example.whs;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\n\nimport android.app.Activity;\nimport android.content.Intent;\nimport android.os.Bundle;\nimport android.view.Menu;\nimport android.view.View;\nimport android.widget.AdapterView;\nimport android.widget.AdapterView.OnItemClickListener;\nimport android.widget.ListView;\n\npublic class Index extends Activity {\n\n public static final Object TITLE = \"title\";\n public static final Object SUBTITLE = \"subtitle\";\n public static final Object THUMBNAIL = \"thumbnail\";\n protected static final String POSITION = null;\n\n @Override\n protected void onCreate(Bundle savedInstanceState) {\n super.onCreate(savedInstanceState);\n setContentView(R.layout.activity_index);\n\n buildMenu();\n }\n\n @Override\n public boolean onCreateOptionsMenu(Menu menu) {\n // Inflate the menu; this adds items to the action bar if it is present.\n getMenuInflater().inflate(R.menu.index, menu);\n return true;\n }\n\n //Builds the menu for listview\n public void buildMenu(){\n ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt; menu = new ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt;();\n //Arrays for info\n String[] menuTitleArray = {\"Updates\", \"Gallerij\"}; \n String[] menuSubtitleArray = {\"Bekijk updates\", \"Bekijk foto's en geef reacties\", \"Bekijk de updates\"};\n String[] menuThumbnailArray = {\"updates\", \"gallery\"};\n for(int i=0; i \u0026lt; menuTitleArray.length; i++){\n // Build Hashmap for the item\n HashMap\u0026lt;String, String\u0026gt; item = new HashMap\u0026lt;String, String\u0026gt;();\n item.put((String) TITLE, menuTitleArray[i]);\n item.put((String) SUBTITLE, menuSubtitleArray[i]);\n item.put((String) THUMBNAIL, menuThumbnailArray[i]);\n menu.add(item);\n }\n\n\n // Add adapter to 
the list\n MenuAdapter adapter = new MenuAdapter(this, menu);\n ListView list = (ListView)findViewById(R.id.list);\n list.setAdapter(adapter);\n\n\n\n // Initialize the click event\n list.setOnItemClickListener(new OnItemClickListener(){\n @Override\n public void onItemClick(AdapterView\u0026lt;?\u0026gt; parent, View view, int position, long id){\n switch(position){\n case 0:\n Intent intent = new Intent(Index.this, Updates.class);\n startActivity(intent);\n }\n }\n });\n\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ehere is the MenuAdapter.java file:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epackage com.example.whs;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\n\nimport android.app.Activity;\nimport android.graphics.drawable.Drawable;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.BaseAdapter;\nimport android.widget.ImageView;\nimport android.widget.TextView;\n\npublic class MenuAdapter extends BaseAdapter{\n // Define variables\n ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt; data;\n Activity activity;\n private LayoutInflater inflater=null;\n\n public MenuAdapter(Activity a, ArrayList\u0026lt;HashMap\u0026lt;String, String\u0026gt;\u0026gt; d) {\n activity = a;\n data = d;\n inflater = LayoutInflater.from (a);\n }\n\n @Override\n public int getCount() {\n return data.size();\n }\n\n @Override\n public Object getItem(int position) {\n // TODO Auto-generated method stub\n return position;\n }\n\n @Override\n public long getItemId(int position) {\n // TODO Auto-generated method stub\n return position;\n }\n\n @Override\n public View getView(int position, View convertView, ViewGroup parent) {\n View vi=convertView;\n if(convertView==null)\n vi = inflater.inflate(R.layout.list_row, null); \n vi.setBackgroundResource(activity.getResources().getIdentifier(\"list_selector\", \"drawable\", Index.class.getPackage().getName()));\n // Focus on the 
parts that have to be changed\n TextView title = (TextView)vi.findViewById(R.id.title); // title\n TextView subtitle = (TextView)vi.findViewById(R.id.subtitle); // subtitle\n ImageView thumb_image=(ImageView)vi.findViewById(R.id.list_image); // thumb image\n\n // Get the info from the hashmap with the arraylist position\n HashMap\u0026lt;String, String\u0026gt; item = new HashMap\u0026lt;String, String\u0026gt;();\n item = data.get(position);\n String name = (String) Index.THUMBNAIL;\n // Look for the image\n int identifier = activity.getResources().getIdentifier(name, \"drawable\", Index.class.getPackage().getName());\n\n // Setting all values in listview\n title.setText(item.get(Index.TITLE));\n subtitle.setText(item.get(Index.SUBTITLE));\n thumb_image.setImageResource(identifier);\n return vi;\n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way how to fix this?\u003c/p\u003e","accepted_answer_id":"15118889","answer_count":"1","comment_count":"5","creation_date":"2013-02-27 17:27:47.273 UTC","last_activity_date":"2013-02-27 17:53:08.607 UTC","last_edit_date":"2013-02-27 17:36:27.7 UTC","last_editor_display_name":"user2108957","owner_display_name":"user2108957","post_type_id":"1","score":"1","tags":"android|image|adapter","view_count":"209"} {"id":"7767622","title":"Debugging amf remote calls (from flex 4) in PHP Eclipse","body":"\u003cp\u003eI have installed and setup xdebug to debug php application. However I was wonder is it possible to debug the remote calls? I am using amfphp, I want to put break points and debug the code when the flex application calls the service. Is it possible? how to do it? Or Is there any way to simulate remote call called from flex 4 withing eclipse?\u003c/p\u003e\n\n\u003cp\u003eThanks in Advance\u003c/p\u003e\n\n\u003cp\u003e[edit]\nI have used xdebug pugin for firefox and chrome extension but both seems not working after I have installed them. 
Basically there is no hint/clue/document explaining how to use them, sadly. Can any one help?\u003c/p\u003e","accepted_answer_id":"7776365","answer_count":"1","comment_count":"0","creation_date":"2011-10-14 12:32:03.903 UTC","last_activity_date":"2011-10-15 07:05:28.043 UTC","last_edit_date":"2011-10-15 06:56:45.377 UTC","last_editor_display_name":"","last_editor_user_id":"310967","owner_display_name":"","owner_user_id":"310967","post_type_id":"1","score":"0","tags":"eclipse|flex4|xdebug|amfphp","view_count":"592"} {"id":"27654990","title":"how to get folder name in this","body":"\u003cp\u003ehello every ine in this i get file name and i crate hyperlink on it but foldername is missing to further action my code is \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;script type=\"text/javascript\"\u0026gt;\n$(document).ready(function(){\nvar files=\u0026lt;?php echo json_encode($files);?\u0026gt;;\nvar file_tree=build_file_tree(files);\nfile_tree.appendTo('#files');\n\nfunction build_file_tree(files){\n var tree=$('\u0026lt;ul\u0026gt;');\n for(x in files){\n\n if(typeof files[x]==\"object\"){\n var span=$('\u0026lt;span\u0026gt;').html(x).appendTo(\n $('\u0026lt;li\u0026gt;').appendTo(tree).addClass('folder')\n );\n\n var subtree=build_file_tree(files[x]).hide();\n span.after(subtree);\n span.click(function(){\n\n $(this).parent().find('ul:first').toggle();\n });\n\n }else{\n $('\u0026lt;li\u0026gt;').html('\u0026lt;a href=\"/admin/appearance/?theme='+tree+'\u0026amp;file='+files[x]+'\"\u0026gt;'+files[x]+'\u0026lt;/a\u0026gt;').appendTo(tree).addClass('file');\n }\n }\n\n return tree;\n\n}\n\n} );\n\n\u0026lt;/script\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei want folder name after theme=\u003c/p\u003e","answer_count":"1","comment_count":"4","creation_date":"2014-12-26 09:02:13.577 UTC","last_activity_date":"2014-12-26 10:06:16.65 
UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4226258","post_type_id":"1","score":"0","tags":"php|jquery","view_count":"64"} -{"id":"30842530","title":"MultiMatch query with Nest and Field Suffix","body":"\u003cp\u003eUsing OpenSearchI have a field with a suffix - string field with a .english suffix with an english analyser on it as shown in the following mapping\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n\"valueString\": {\n \"type\": \"string\",\n \"fields\": {\n \"english\": {\n \"type\": \"string\",\n \"analyzer\": \"english\"\n }\n }\n}\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe following query snippet won't compile because \u003ccode\u003eValueString\u003c/code\u003e has no \u003ccode\u003eEnglish\u003c/code\u003e property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\nsh =\u0026gt; sh\n .Nested(n =\u0026gt; n\n .Path(p =\u0026gt; p.ScreenData)\n .Query(nq =\u0026gt; nq\n .MultiMatch(mm =\u0026gt; mm\n .Query(searchPhrase)\n .OnFields(\n f =\u0026gt; f.ScreenData.First().ValueString,\n f =\u0026gt; f.ScreenData.First().ValueString.english)\n .Type(TextQueryType.BestFields)\n )\n )\n )...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to strongly type the suffix at query time in NEST or do I have to use magic strings?\u003c/p\u003e","accepted_answer_id":"30843154","answer_count":"1","comment_count":"0","creation_date":"2015-06-15 10:11:22.09 UTC","last_activity_date":"2015-06-15 10:43:10.693 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"565804","post_type_id":"1","score":"1","tags":"c#|opensearch|nest","view_count":"1188"} +{"id":"30842530","title":"MultiMatch query with Nest and Field Suffix","body":"\u003cp\u003eUsing OpenSearch I have a field with a suffix - string field with a .english suffix with an english analyser on it as shown in the following mapping\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n\"valueString\": {\n \"type\": 
\"string\",\n \"fields\": {\n \"english\": {\n \"type\": \"string\",\n \"analyzer\": \"english\"\n }\n }\n}\n...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe following query snippet won't compile because \u003ccode\u003eValueString\u003c/code\u003e has no \u003ccode\u003eEnglish\u003c/code\u003e property.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\nsh =\u0026gt; sh\n .Nested(n =\u0026gt; n\n .Path(p =\u0026gt; p.ScreenData)\n .Query(nq =\u0026gt; nq\n .MultiMatch(mm =\u0026gt; mm\n .Query(searchPhrase)\n .OnFields(\n f =\u0026gt; f.ScreenData.First().ValueString,\n f =\u0026gt; f.ScreenData.First().ValueString.english)\n .Type(TextQueryType.BestFields)\n )\n )\n )...\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs there a way to strongly type the suffix at query time in NEST or do I have to use magic strings?\u003c/p\u003e","accepted_answer_id":"30843154","answer_count":"1","comment_count":"0","creation_date":"2015-06-15 10:11:22.09 UTC","last_activity_date":"2015-06-15 10:43:10.693 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"565804","post_type_id":"1","score":"1","tags":"c#|opensearch|nest","view_count":"1188"} {"id":"40569198","title":"How to store database into struct using swift3?","body":"\u003cp\u003eI have a function to get the database and return it in MutableArray, now I need the database to be in a struct.\u003c/p\u003e\n\n\u003cp\u003eDo I need to get the MutableArray into struct or should I get the data straight into the struct?\u003c/p\u003e\n\n\u003cp\u003eI have no idea how to approach this or how to store the database into struct \u003c/p\u003e\n\n\u003cp\u003eMy code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass CrimesInfo: NSObject {\n\nvar name: String = String()\nvar detail: String = String()\nvar time: String = String()\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe function:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003efunc getAllCrimesData() -\u0026gt; 
NSMutableArray {\n sharedInstance.database!.open()\n let resultSet: FMResultSet! = sharedInstance.database!.executeQuery(\"SELECT * FROM CrimeTable\", withArgumentsIn: nil)\n let marrCrimesInfo : NSMutableArray = NSMutableArray()\n if (resultSet != nil) {\n while resultSet.next() {\n let crimesInfo : CrimesInfo = CrimesInfo()\n crimesInfo.name = resultSet.string(forColumn: \"Name\")\n crimesInfo.detail = resultSet.string(forColumn: \"Detail\")\n crimesInfo.time = resultSet.string(forColumn: \"Time\")\n marrCrimesInfo.add(crimesInfo)\n }\n }\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"40569688","answer_count":"1","comment_count":"0","creation_date":"2016-11-13 00:20:57.203 UTC","last_activity_date":"2016-11-13 01:51:07.483 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"6705849","post_type_id":"1","score":"0","tags":"ios|swift3","view_count":"170"} {"id":"38263373","title":"To_Date, To_Char in oracle","body":"\u003cp\u003eMy Query for on oracle DB is:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eSELECT NBR, START_TIME,END_TIME, BYTES_DATA\nFROM TABLE_NAME Partition (P201607)\nWHERE BYTES_DATA \u0026lt;\u0026gt; 0 AND NBR LIKE '%29320319%'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand results in:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eNBR START_TIME END_TIME BYTES_DATA \n1029320319 2016-07-01 00:15:51 2016-07-01 00:22:44 158014048\n1029320319 2016-07-01 00:22:51 2016-07-01 01:22:51 616324863 \n1029320319 2016-07-01 01:22:51 2016-07-01 01:55:15 431354240 \n1029320319 2016-07-01 01:55:22 2016-07-01 02:53:45 1040869155 \n1029320319 2016-07-01 02:53:52 2016-07-01 03:53:52 40615861 \n1029320319 2016-07-04 07:22:05 2016-07-04 07:22:05 4911\n1029320319 2016-07-05 06:42:56 2016-07-05 07:42:56 58271774\n1029320319 2016-07-05 07:42:56 2016-07-05 07:42:56 173\n1029320319 2016-07-08 07:47:01 2016-07-08 07:47:01 105995\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut I would like to filter these output based on 
Time. How can I get all records during this month(07) or last 7 days where the start_time and end_time is between 06:30:00 and 07:59:59? \u003c/p\u003e","accepted_answer_id":"38284821","answer_count":"1","comment_count":"0","creation_date":"2016-07-08 09:34:30.6 UTC","last_activity_date":"2016-07-09 18:10:40.19 UTC","last_edit_date":"2016-07-08 09:38:09.347 UTC","last_editor_display_name":"","last_editor_user_id":"164909","owner_display_name":"","owner_user_id":"6509716","post_type_id":"1","score":"0","tags":"oracle-sqldeveloper","view_count":"91"} {"id":"38844041","title":"Web scrape password protected website but there are errors","body":"\u003cp\u003eI am trying to scrape data from the member directory of a website (\"members.dublinchamber.ie\"). I have tried using the 'rvest' but I got the data from the login page even after entering the login details. The code is as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elibrary(rvest)\nurl \u0026lt;- \"members.dublinchamber.ie/login.aspx\"\npgsession \u0026lt;- html_session(url) \npgform \u0026lt;- html_form(pgsession)[[2]]\nfilled_form \u0026lt;- set_values(pgform,\n \"Username\" = \"username\",\n \"Password\" = \"password\")\nsubmit_form(pgsession, filled_form)\nmemberlist \u0026lt;- jump_to(pgsession,'members.dublinchamber.ie/directory/profile.aspx?compid=50333')\npage \u0026lt;- read_html(memberlist)\nusernames \u0026lt;- html_nodes(x = page, css = 'css of required data')\ndata_usernames \u0026lt;- data.frame(html_text(usernames, trim = TRUE),stringsAsFactors = FALSE)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI also used RCurl and again I'm getting data from the login page. 
The RCurl code is as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elibrary(RCurl)\ncurl = getCurlHandle()\ncurlSetOpt(cookiejar = 'cookies.txt', followlocation = TRUE, autoreferer = TRUE, curl = curl)\nhtml \u0026lt;- getURL('http://members.dublinchamber.ie/login.aspx', curl = curl)\nviewstate \u0026lt;- as.character(sub('.*id=\"__VIEWSTATE\" value=['142555296'].*', '\\\\1', html))\nparams \u0026lt;- list(\n 'ctl00$ContentPlaceHolder1$ExistingMembersLogin1$username'= 'username',\n 'ctl00$ContentPlaceHolder1$ExistingMembersLogin1$password'= 'pass',\n 'ctl00$ContentPlaceHolder1$ExistingMembersLogin1$btnSubmit'= 'login',\n '__VIEWSTATE' = viewstate\n)\nhtml = postForm('http://members.dublinchamber.ie/login.aspx', .params = params, curl = curl)\n grep('Logout', html)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThere are 3 URL's actually:\n1) members.dublinchamber.ie/directory/default.aspx(has the names of all industry and it is required to click on any industry)\n2) members.dublinchamber.ie/directory/default.aspx?industryVal=AdvMarPubrel (the advmarpubrel is just a small string which is generated as i clicked that industry)\n3) members.dublinchamber.ie/directory/profile.aspx?compid=19399 (this has the profile information of a specific company which i clicked in the previous page)\u003c/p\u003e\n\n\u003cp\u003ei want to scrape data which should give me industry name, list of companies in each industry and their details which are present as a table in the 3rd URL above.\nI am new here and also to R, webscrape. 
Please don't mind if the question was lengthy or not that clear.\u003c/p\u003e","answer_count":"0","comment_count":"6","creation_date":"2016-08-09 06:54:49.677 UTC","last_activity_date":"2016-08-09 06:54:49.677 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"6694101","post_type_id":"1","score":"0","tags":"r|web-scraping|password-protection","view_count":"212"} @@ -3204,7 +3204,7 @@ {"id":"33768447","title":"Incorrect number of bindings supplied python","body":"\u003cp\u003eI'm executing the following query in sqllite\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eidP = cur.execute('SELECT id from profs where name = ?',name)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI have a database table like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e| id | name |\n| 1 | xxxxxx |\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut I got an error : Incorrect number of bindings supplied. The current statement uses 1, and there are 6 supplied.\u003c/p\u003e\n\n\u003cp\u003eI think that the string 'xxxxxx' is seen as six individual characters.\u003c/p\u003e","accepted_answer_id":"33768486","answer_count":"2","comment_count":"0","creation_date":"2015-11-17 22:38:59.177 UTC","last_activity_date":"2015-11-17 23:23:33.48 UTC","last_edit_date":"2015-11-17 23:11:37.997 UTC","last_editor_display_name":"","last_editor_user_id":"2990008","owner_display_name":"","owner_user_id":"5574149","post_type_id":"1","score":"1","tags":"python|sqlite","view_count":"44"} {"id":"39574222","title":"xpath cant select only one html tag","body":"\u003cp\u003eI am trying to get some data from a website, but when i use the following code it's return all of the matched elements, i want to return only 1st match! 
I've tried extract_first but it returned none!\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e# -*- coding: utf-8 -*-\nimport scrapy\nfrom gumtree.items import GumtreeItem\n\n\n\nclass FlatSpider(scrapy.Spider):\n name = \"flat\"\n allowed_domains = [\"gumtree.com\"]\n start_urls = (\n 'https://www.gumtree.com/flats-for-sale',\n )\n\n def parse(self, response):\n item = GumtreeItem()\n item['title'] = response.xpath('//*[@class=\"listing-title\"][1]/text()').extract()\n return item\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow to select only one element with xpath selector ? \u003c/p\u003e","accepted_answer_id":"39574339","answer_count":"2","comment_count":"0","creation_date":"2016-09-19 13:18:20.28 UTC","last_activity_date":"2016-09-22 18:36:35.86 UTC","last_edit_date":"2016-09-19 13:24:58.857 UTC","last_editor_display_name":"","last_editor_user_id":"771848","owner_display_name":"","owner_user_id":"6570112","post_type_id":"1","score":"1","tags":"python|python-3.x|xpath|web-scraping|scrapy","view_count":"48"} {"id":"341477","title":"Generic Generics in Managed C++","body":"\u003cp\u003eI want to create a \u003cstrong\u003eList\u003c/strong\u003e of \u003cstrong\u003eKeyValuePair\u003c/strong\u003es in a managed C++ project. Here is the syntax I'm using\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eList\u0026lt;KeyValuePair\u0026lt;String^, String^\u0026gt;^\u0026gt;^ thing;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut I'm getting the following error:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eerror C3225: generic type argument for 'T' cannot be 'System::Collections::Generic::KeyValuePair ^', it must be a value type or a handle to a reference type\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eI basically want to do this (C#)\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eList\u0026lt;KeyValuePair\u0026lt;string, string\u0026gt;\u0026gt; thing;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ebut in managed C++. 
Oh and in .Net 2.0. Any takers?\u003c/p\u003e","accepted_answer_id":"341694","answer_count":"2","comment_count":"0","creation_date":"2008-12-04 17:40:58.63 UTC","last_activity_date":"2008-12-04 19:02:23.07 UTC","last_editor_display_name":"","owner_display_name":"brian","owner_user_id":"2831","post_type_id":"1","score":"2","tags":".net|generics|.net-2.0|managed-c++","view_count":"4607"} -{"id":"35100129","title":"How to get a nested document as object in mongoosastic","body":"\u003cp\u003ei have a nodejs server with mongoosastic an try to get a nested search result as objects instead of only the indexes. \u003c/p\u003e\n\n\u003cp\u003ethats my code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erequire('../server/serverInit');\n\n\nvar opensearch = require('opensearch');\nvar esclient = new opensearch.Client({\n host: 'localhost:9200',\n log: 'trace'\n});\n\n\nvar Schema = mongoose.Schema;\nvar mongoosastic = require('mongoosastic');\n\nvar opensearch = require('opensearch');\nvar esclient = new opensearch.Client({\n host: '127.0.0.1:9200',\n log: 'trace'\n});\nglobal.DBModel = {};\n/**\n * StoreSchema\n * @type type\n */\n\nvar storeSchema = global.mongoose.Schema({\n Name: {type: String, es_indexed: true},\n Email: {type: String, es_indexed: true},\n .....\n _articles: {type: [articleSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\n/**\n * ArtikelSchema\n * @type Schema\n */\n\nvar articleSchema = new Schema({ \n Name: {type: String, es_indexed: true},\n Kategorie: String,\n ....\n _stores: {type: [storeSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\nstoreSchema.plugin(mongoosastic, {\n esClient: esclient\n});\narticleSchema.plugin(mongoosastic, {\n esClient: esclient\n});\nglobal.DBModel.Artikel = global.mongoose.model('Artikel', articleSchema);\n\nglobal.DBModel.Store = global.mongoose.model('Store', storeSchema);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhen i now fire a 
search from the route \"/search\" which have this example code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eglobal.DBModel.Artikel.search({\n query_string: {\n query: \"*\"\n }\n }, {\n hydrate: true\n }, function (err, results) {\n if (err)\n return res.send(500, {error: err});\n res.send(results);\n }); \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei get this result:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n {\n \"_id\": \"56ab6b15352a43725a21bc92\",\n \"stores\": [\n \"56ab6b03352a43725a21bc91\"\n ],\n \"Name\": \"daaadd\",\n \"ArtikelNummer\": \"232\",\n \"__v\": 0,\n \"_stores\": []\n }\n ]\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow i can get directly a object instead of the id \"56ab6b03352a43725a21bc91\"? \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-01-30 09:33:13.4 UTC","last_activity_date":"2016-04-23 17:28:07.41 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4884035","post_type_id":"1","score":"0","tags":"node.js|mongodb|opensearch|mongoose|mongoosastic","view_count":"595"} +{"id":"35100129","title":"How to get a nested document as object in mongoosastic","body":"\u003cp\u003ei have a nodejs server with mongoosastic an try to get a nested search result as objects instead of only the indexes. 
\u003c/p\u003e\n\n\u003cp\u003ethats my code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erequire('../server/serverInit');\n\n\nvar opensearch = require('opensearch');\nvar esclient = new opensearch.Client({\n host: 'localhost:9200',\n log: 'trace'\n});\n\n\nvar Schema = mongoose.Schema;\nvar mongoosastic = require('mongoosastic');\n\nvar opensearch = require('opensearch');\nvar esclient = new opensearch.Client({\n host: '127.0.0.1:9200',\n log: 'trace'\n});\nglobal.DBModel = {};\n/**\n * StoreSchema\n * @type type\n */\n\nvar storeSchema = global.mongoose.Schema({\n Name: {type: String, es_indexed: true},\n Email: {type: String, es_indexed: true},\n .....\n _articles: {type: [articleSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\n/**\n * ArtikelSchema\n * @type Schema\n */\n\nvar articleSchema = new Schema({ \n Name: {type: String, es_indexed: true},\n Kategorie: String,\n ....\n _stores: {type: [storeSchema],\n es_indexed: true,\n es_type: 'nested',\n es_include_in_parent: true}\n});\n\nstoreSchema.plugin(mongoosastic, {\n opensearchClient: esclient\n});\narticleSchema.plugin(mongoosastic, {\n opensearchClient: esclient\n});\nglobal.DBModel.Artikel = global.mongoose.model('Artikel', articleSchema);\n\nglobal.DBModel.Store = global.mongoose.model('Store', storeSchema);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhen i now fire a search from the route \"/search\" which have this example code:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eglobal.DBModel.Artikel.search({\n query_string: {\n query: \"*\"\n }\n }, {\n hydrate: true\n }, function (err, results) {\n if (err)\n return res.send(500, {error: err});\n res.send(results);\n }); \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei get this result:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\n {\n \"_id\": \"56ab6b15352a43725a21bc92\",\n \"stores\": [\n \"56ab6b03352a43725a21bc91\"\n ],\n \"Name\": \"daaadd\",\n \"ArtikelNummer\": \"232\",\n \"__v\": 
0,\n \"_stores\": []\n }\n ]\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHow i can get directly a object instead of the id \"56ab6b03352a43725a21bc91\"? \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2016-01-30 09:33:13.4 UTC","last_activity_date":"2016-04-23 17:28:07.41 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4884035","post_type_id":"1","score":"0","tags":"node.js|mongodb|opensearch|mongoose|mongoosastic","view_count":"595"} {"id":"6481429","title":"find index of element in a list using recursion","body":"\u003cpre\u003e\u003ccode\u003edef index(L,v)\n ''' Return index of value v in L '''\n pass\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI need help with implementing this function using recursion.\nReally new to recursion stuffs so any advices would help.!\u003c/p\u003e\n\n\u003cp\u003eNote that \u003ccode\u003eL\u003c/code\u003e is a list. \u003ccode\u003ev\u003c/code\u003e is a value.\u003c/p\u003e","answer_count":"7","comment_count":"3","creation_date":"2011-06-26 00:44:42.44 UTC","last_activity_date":"2011-06-26 01:43:03.01 UTC","last_edit_date":"2011-06-26 00:49:32.357 UTC","last_editor_display_name":"","last_editor_user_id":"396183","owner_display_name":"","owner_user_id":"815528","post_type_id":"1","score":"0","tags":"python","view_count":"5478"} {"id":"15201945","title":"xml querying with variable","body":"\u003cp\u003eI am trying to pass the xpath as parameter to the query. \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e declare @test as nvarchar(1000) = '(ns1:Book/Authors)[1]'\n ;with XMLNAMESPACES ('MyNameSpace:V1' as ns1)\n select \n b.XmlData.value(\n '@test'\n , 'nvarchar(100)') as QueriedData \n from Books b\n where b.BookID = '1'\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe above statement gave the following error. 
\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eXQuery [Books.XmlData.value()]: Top-level attribute nodes are not supported\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTried it as @test, instead of '@test'. And got the following error:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eThe argument 1 of the XML data type method \"value\" must be a string literal.\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTried it using 'sql:variable(@test)' and get this error: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eXQuery [Books.XmlData.value()]: A string literal was expected\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eTried it as 'sql:variable(\"@test\")' and it shows the value in @test as QueriedData, which is wrong \u003c/p\u003e\n\n\u003cp\u003ePlease tell me what am I missing here\u003c/p\u003e","accepted_answer_id":"15203967","answer_count":"1","comment_count":"1","creation_date":"2013-03-04 12:46:28.233 UTC","last_activity_date":"2013-03-04 14:47:07.283 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2081289","post_type_id":"1","score":"0","tags":"sql-server|xml","view_count":"1860"} {"id":"7279001","title":"Loading large data","body":"\u003cp\u003eI hava a datatable with large amount of data (250K).\u003cbr\u003e\nI have used DevExpress component and nhibernate.\u003cbr\u003e\nIn devexpress components is server mode, but it does not suit me because I am using nHibernate.\u003cbr\u003e\nIn the table is many column as well. 
And 5 relation tables which displays together with main table (250K records).\n What a best way to advise me to achieve the goal?\u003cbr\u003e\nThanks and sorry for my English.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEDIT:\u003c/strong\u003e\u003cbr\u003e\nHow to implement loading data with small portions?\u003c/p\u003e","accepted_answer_id":"7279537","answer_count":"2","comment_count":"1","creation_date":"2011-09-02 03:40:26.237 UTC","last_activity_date":"2011-09-02 05:59:19.53 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"348173","post_type_id":"1","score":"1","tags":"c#|nhibernate|devexpress","view_count":"477"} @@ -4621,7 +4621,7 @@ {"id":"30754293","title":"Getting the messages back to the same command line from where the MFC application was launched","body":"\u003cp\u003eI am executing an \u003cstrong\u003eMFC Application\u003c/strong\u003e from command line which takes four command line arguments.One of the argument is the directory path.\nIf the path is wrong then I want to show a Message \"Bad Path\" on the same \u003cstrong\u003ecommand line\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eNote : For showing I don't want to take a new command line .\u003c/strong\u003e \u003c/p\u003e","accepted_answer_id":"30769051","answer_count":"1","comment_count":"2","creation_date":"2015-06-10 10:57:14.113 UTC","last_activity_date":"2015-06-10 23:29:30.78 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4948953","post_type_id":"1","score":"0","tags":"c++|mfc","view_count":"76"} {"id":"7582284","title":"Apply TreeView bindings to non-expanded notes","body":"\u003cp\u003eI want to use a hierarchical TreeView which I will populate programmatically.\u003c/p\u003e\n\n\u003cp\u003eMy XAML file is as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;Window.Resources\u0026gt;\n \u0026lt;HierarchicalDataTemplate \n DataType=\"{x:Type local:MyTreeViewItem}\" \n 
ItemsSource=\"{Binding Path=Children}\"\u0026gt;\n \u0026lt;TextBlock Text=\"{Binding Path=Header}\"/\u0026gt;\n \u0026lt;/HierarchicalDataTemplate\u0026gt;\n\u0026lt;/Window.Resources\u0026gt;\n\n\n\u0026lt;Grid\u0026gt;\n \u0026lt;TreeView Margin=\"12,12,422,33\" Name=\"treeView1\" SelectedItemChanged=\"treeView1_SelectedItemChanged\" MouseDoubleClick=\"treeView1_MouseDoubleClick\"\u0026gt;\n \u0026lt;TreeView.ItemContainerStyle\u0026gt;\n \u0026lt;Style TargetType=\"{x:Type TreeViewItem}\"\u0026gt;\n \u0026lt;Setter Property=\"IsSelected\" Value=\"{Binding IsSelected, Mode=TwoWay}\"/\u0026gt;\n \u0026lt;Setter Property=\"IsExpanded\" Value=\"{Binding IsExpanded, Mode=TwoWay}\"/\u0026gt;\n \u0026lt;/Style\u0026gt;\n \u0026lt;/TreeView.ItemContainerStyle\u0026gt;\n \u0026lt;/TreeView\u0026gt;\n\u0026lt;/Grid\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe problem I'm having is that it seems that the bindings are applied only once the item is visible.\u003c/p\u003e\n\n\u003cp\u003eSuppose I populate the TreeView as follows:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprivate ObservableCollection\u0026lt;MyTreeViewItem\u0026gt; m_items;\nprivate MyTreeViewItem m_item1;\nprivate MyTreeViewItem m_item2;\n\npublic MainWindow()\n{\n InitializeComponent();\n m_items = new ObservableCollection\u0026lt;MyTreeViewItem\u0026gt;();\n m_item1 = new MyTreeViewItem(null) {Header = \"Item1\"};\n m_item2 = new MyTreeViewItem(null) {Header = \"Item2\"};\n m_item1.Children.Add(m_item2);\n m_items.Add(m_item1);\n treeView1.ItemsSource = m_items;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI also have a button that selects m_item2:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eprivate void button2_Click(object sender, RoutedEventArgs e)\n{\n m_item2.IsSelected = true;\n m_item2.IsExpanded = true;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eNow, if I launch the program and the TreeView only shows Item1 (Item2 is hidden because Item1 is not 
expanded), then clicking the button won't select m_item2. If I expand Item1 (thus making Item2 visible), the button will select m_item2.\u003c/p\u003e\n\n\u003cp\u003eExamining the PropertyChanged event on m_item2, I see that it is set to null initially, and a delegate is registered only once it is visible.\u003c/p\u003e\n\n\u003cp\u003eThis is a problem for me because I want to be able to programmatically select an item, even if its parent has not yet been expanded (e.g. I want to be able to find a node in the tree).\u003c/p\u003e\n\n\u003cp\u003eI suppose I can programmatically expand and collapse all nodes, but it seems there should be a better way. Can someone suggest a solution?\u003c/p\u003e","accepted_answer_id":"7582421","answer_count":"1","comment_count":"0","creation_date":"2011-09-28 11:09:08.647 UTC","last_activity_date":"2011-09-28 11:20:18.247 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"447202","post_type_id":"1","score":"1","tags":"wpf|treeview","view_count":"214"} {"id":"12660841","title":"Making my POS : Error on DataGridview","body":"\u003cp\u003eI am trying to make a simple POS system with VB.NET, but since I don't know how to start, i ask for my friends to give me sample source code. 
I planning to use MySQL for my database rather than Microsoft Access because our school uses it.\nBelow is a sample code of the source code :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ePublic Sub FillDGVWithReceiptInfo(ByVal DGV As DataGridView)\n DGV.Rows.Clear()\n Dim TA As New POSDSTableAdapters.ItemsTableAdapter\n\n For i = 0 To Me.ReceiptDetailsList.Count - 1\n Dim T1 = Me.ReceiptDetailsList(i).Barcode\n Dim T2 = Me.ReceiptDetailsList(i).ItemBuyPrice\n Dim T3 = Me.ReceiptDetailsList(i).ItemCount\n Dim T4 = Me.ReceiptDetailsList(i).ItemSellPrice\n Dim T5 = T3 * T4\n Dim T6 = TA.GetDataByBarcode(T1).Rows(0).Item(\"ItemName\")\n\n DGV.Rows.Add(T1, T6, T2, T4, T3, T5)\n\n Next\n End Sub\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ei am trying to convert it to an \"OdBC\" kind of format. so i came up with this (also, this is the part where i get some error) :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e Public Sub FillDGVWithReceiptInfo(ByVal DGV As DataGridView)\n DGV.Rows.Clear()\n\n For i = 0 To Me.ReceiptDetailsList.Count - 1\n Dim T1 = Me.ReceiptDetailsList(i).ganoProdID\n Dim T3 = Me.ReceiptDetailsList(i).ItemCount\n Dim T4 = Me.ReceiptDetailsList(i).ganoItemPrice\n Dim T5 = T3 * T4\n\n Dim TA As New OdbcDataAdapter(\"SELECT * FROM gano_inventory WHERE gano_proID = \" \u0026amp; T1 \u0026amp; \";\", conn)\n Dim R As New DataTable\n TA.Fill(R)\n\n Dim T6 = R.Rows(0).Item(\"gano_item\")\n\n DGV.Rows.Add(T1, T6, T4, T3, T5)\n\n Next\n End Sub\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethis is the code's error :\n\u003cem\u003eNo row can be added to a DataGridView control that does not have columns. Columns must be added first.\u003c/em\u003e in this line : \u003cstrong\u003eDGV.Rows.Add(T1, T6, T4, T3, T5)\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003ecan someone please help me with it? 
thank you in advance!\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2012-09-30 11:21:01.953 UTC","last_activity_date":"2013-06-18 21:27:31.37 UTC","last_edit_date":"2012-09-30 13:34:48.07 UTC","last_editor_display_name":"","last_editor_user_id":"1643554","owner_display_name":"","owner_user_id":"1643554","post_type_id":"1","score":"0","tags":"vb.net|datagridview|odbc","view_count":"644"} -{"id":"33016898","title":"OpenSearchrequest optimisation (strange script_score in Java API with bool query)","body":"\u003cp\u003eWith OpenSearch1.7.0, I'd like to make a query on a text field of my documents. I need to get all the documents which:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003cp\u003ematch partially (all the word needs to exist with synonyms et fuzzy)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch fuzzy (all the word needs to exist + fuzzy + phonetic)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch related (50% of the word need to be found)\u003c/p\u003e\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eI made a Java program with 3 OpenSearchrequests but those queries were too long so I've tried to use one query for all that:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": \n {\"bool\": {\n \"should\": [\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.syn\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"1\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"3\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": 
\"sorbone\",\n \"operator\": \"or\", \n \"minimum_should_match\": \"50%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"7\"\n }\n }\n }\n ]\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe idea is to use a bool_query with a specific score for each document returned. It works well but when I try to convert it using Java API, I have a score strangely calculated, instead there are decimals in the score and I was waiting to have numbers like 7 3 1 4 10 8 which correspond to sum of score.\u003c/p\u003e\n\n\u003cp\u003eThe code I used:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsPhon = matchQuery(\"text.phonetic\", \"sorbonne\")\n .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsText = matchQuery(\"text\", \"sorbonne\")\n .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.OR)\n .minimumShouldMatch(\"50%\");\n QueryBuilder functionScorePartial = functionScoreQuery(termsSyn)\n .add(ScoreFunctionBuilders.scriptFunction(\"1\"))\n .boostMode(\"replace\"); \n\n\nQueryBuilder functionScoreFuzzy = functionScoreQuery(termsPhon)\n .add(ScoreFunctionBuilders.scriptFunction(\"7\"))\n .boostMode(\"replace\"); \n\nQueryBuilder functionScoreRelated = functionScoreQuery(termsText)\n .add(ScoreFunctionBuilders.scriptFunction(\"15\"))\n .boostMode(\"replace\")\n ; \n\nQueryBuilder boolQ = boolQuery()\n .should(functionScorePartial)\n .should(functionScoreFuzzy)\n .should(functionScoreRelated);\n\nsqb.setQuery(boolQ);\n\n\nSearchResponse response = sqb.execute().actionGet();\nSearchHits hits = response.getHits();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I look to the generated JSON I see that the script function is not generated the same way. 
In the original REST I've got:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"functions\" : [ {\n \"script_score\" : {\n \"script\" : \"1\"\n }\n } ],\n \"boost_mode\" : \"replace\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIn the generated JSON, there's no \"functions\" array:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e \"script_score\": {\n \"script\": \"1\"\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs it a bug in the OpenSearchJava API?\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2015-10-08 13:12:43.48 UTC","last_activity_date":"2015-10-13 17:38:20.99 UTC","last_edit_date":"2015-10-08 13:46:35.513 UTC","last_editor_display_name":"","last_editor_user_id":"880772","owner_display_name":"","owner_user_id":"5061275","post_type_id":"1","score":"0","tags":"java|opensearch","view_count":"193"} +{"id":"33016898","title":"OpenSearch request optimisation (strange script_score in Java API with bool query)","body":"\u003cp\u003eWith OpenSearch 1.7.0, I'd like to make a query on a text field of my documents. 
I need to get all the documents which:\u003c/p\u003e\n\n\u003col\u003e\n\u003cli\u003e\u003cp\u003ematch partially (all the word needs to exist with synonyms et fuzzy)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch fuzzy (all the word needs to exist + fuzzy + phonetic)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ematch related (50% of the word need to be found)\u003c/p\u003e\u003c/li\u003e\n\u003c/ol\u003e\n\n\u003cp\u003eI made a Java program with 3 OpenSearch requests but those queries were too long so I've tried to use one query for all that:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\n \"query\": \n {\"bool\": {\n \"should\": [\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.syn\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"1\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": \"sorbone\",\n \"operator\": \"and\",\n \"fuzziness\": 1,\n \"minimum_should_match\": \"100%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"3\"\n }\n }\n },\n {\n \"function_score\": {\n \"boost_mode\": \"replace\",\n \"query\": {\n \"match\": {\n \"text.phonetic\": {\n \"query\": \"sorbone\",\n \"operator\": \"or\", \n \"minimum_should_match\": \"50%\"\n }\n }\n },\n \"script_score\": {\n \"script\": \"7\"\n }\n }\n }\n ]\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe idea is to use a bool_query with a specific score for each document returned. 
It works well but when I try to convert it using Java API, I have a score strangely calculated, instead there are decimals in the score and I was waiting to have numbers like 7 3 1 4 10 8 which correspond to sum of score.\u003c/p\u003e\n\n\u003cp\u003eThe code I used:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsPhon = matchQuery(\"text.phonetic\", \"sorbonne\")\n .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.AND)\n .fuzziness(Fuzziness.ONE)\n .minimumShouldMatch(\"100%\");\n QueryBuilder termsText = matchQuery(\"text\", \"sorbonne\")\n .operator(org.opensearch.index.query.MatchQueryBuilder.Operator.OR)\n .minimumShouldMatch(\"50%\");\n QueryBuilder functionScorePartial = functionScoreQuery(termsSyn)\n .add(ScoreFunctionBuilders.scriptFunction(\"1\"))\n .boostMode(\"replace\"); \n\n\nQueryBuilder functionScoreFuzzy = functionScoreQuery(termsPhon)\n .add(ScoreFunctionBuilders.scriptFunction(\"7\"))\n .boostMode(\"replace\"); \n\nQueryBuilder functionScoreRelated = functionScoreQuery(termsText)\n .add(ScoreFunctionBuilders.scriptFunction(\"15\"))\n .boostMode(\"replace\")\n ; \n\nQueryBuilder boolQ = boolQuery()\n .should(functionScorePartial)\n .should(functionScoreFuzzy)\n .should(functionScoreRelated);\n\nsqb.setQuery(boolQ);\n\n\nSearchResponse response = sqb.execute().actionGet();\nSearchHits hits = response.getHits();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhen I look to the generated JSON I see that the script function is not generated the same way. 
In the original REST I've got:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\"functions\" : [ {\n \"script_score\" : {\n \"script\" : \"1\"\n }\n } ],\n \"boost_mode\" : \"replace\"\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIn the generated JSON, there's no \"functions\" array:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e \"script_score\": {\n \"script\": \"1\"\n }\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIs it a bug in the OpenSearch Java API?\u003c/p\u003e","answer_count":"2","comment_count":"0","creation_date":"2015-10-08 13:12:43.48 UTC","last_activity_date":"2015-10-13 17:38:20.99 UTC","last_edit_date":"2015-10-08 13:46:35.513 UTC","last_editor_display_name":"","last_editor_user_id":"880772","owner_display_name":"","owner_user_id":"5061275","post_type_id":"1","score":"0","tags":"java|opensearch","view_count":"193"} {"id":"6719069","title":"WP7 7.1 (Mango) Database Versioning","body":"\u003cp\u003eI have a class structure that's saved to the local SQL DB on the phone. In the next version of the app, the class structure has changed.\u003c/p\u003e\n\n\u003cp\u003eHow does the SQL DB deserialize the data into the changed objects/structure?\u003c/p\u003e","accepted_answer_id":"6748873","answer_count":"1","comment_count":"0","creation_date":"2011-07-16 17:34:45.21 UTC","last_activity_date":"2011-07-28 23:27:26.86 UTC","last_edit_date":"2011-07-28 23:27:26.86 UTC","last_editor_display_name":"","last_editor_user_id":"149573","owner_display_name":"","owner_user_id":"68499","post_type_id":"1","score":"0","tags":"database|windows-phone-7","view_count":"242"} {"id":"37657474","title":"Google classroom api doesn't return the alias when a course is created","body":"\u003cp\u003eBackground: We currently have a database with every course, teacher and student in our school board. 
I am basically trying to build a system to sync this with our Google Classroom environment, so every teacher will have their courses, students will be enrolled ect.\u003c/p\u003e\n\n\u003cp\u003eProblem: We have over 8000 courses to create and want to use the batch system or at least create them asynchronously. We pass our internal unique course ID in the create call through the alias. However in the callback method this value is not passed back. This means we have no way of linking the google unique ID to ours, and no way of knowing if something goes wrong, which courses were not created.\u003c/p\u003e\n\n\u003cp\u003eExample: I want to create 5 courses with the following ids:\n1234\n1235\n1236\n1237\n1238\u003c/p\u003e\n\n\u003cp\u003eSo I create a batch request and the call back gets called 5 times. The data in the call back does not contain the IDs I sent in though if only contains the google IDs:\u003c/p\u003e\n\n\u003cp\u003e9876\n9875\nError\n9873\n9872\u003c/p\u003e\n\n\u003cp\u003eThe API specifically mentions that the order cannot be trusted. So how can I tell which google ID belong to which course and how can I tell witch course had the error?\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2016-06-06 12:27:08.82 UTC","last_activity_date":"2016-08-10 19:10:24.06 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"5326876","post_type_id":"1","score":"0","tags":"google-classroom","view_count":"74"} {"id":"3761021","title":"Toolbar package missing in sdk 6.0","body":"\u003cp\u003e\u003cimg src=\"https://i.stack.imgur.com/AxN3P.png\" alt=\"alt text\"\u003e\u003c/p\u003e\n\n\u003cp\u003eI can not use the toolbar lib even with sdk 6.0\u003c/p\u003e\n\n\u003cp\u003eI am using 6.0\u003c/p\u003e\n\n\u003cp\u003eCan anyone help me .. 
i m stuck here \u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2010-09-21 13:55:38.947 UTC","last_activity_date":"2010-09-21 15:34:28.76 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"410693","post_type_id":"1","score":"0","tags":"java|blackberry","view_count":"36"} diff --git a/test/integration/README.md b/test/integration/README.md index a89863b30..a6935f1fa 100644 --- a/test/integration/README.md +++ b/test/integration/README.md @@ -5,15 +5,9 @@ Yes. ## Background -<<<<<<< HEAD -Elasticsearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/api).
-To support different languages at the same time, the Elasticsearch team decided to provide a [YAML specification](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.
-This testing suite uses that specification to generate the test for the specified version of Elasticsearch on the fly. -======= -OpenSearchoffers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/opensearch-project/opensearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/api).
-To support different languages at the same time, the OpenSearchteam decided to provide a [YAML specification](https://github.com/opensearch-project/opensearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.
-This testing suite uses that specification to generate the test for the specified version of OpenSearchon the fly. ->>>>>>> Rename all elastic/elasticsearch/kibana references and clean up any remaining x-pack resources +OpenSearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/opensearch-project/OpenSearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/api).
+To support different languages at the same time, the OpenSearch team decided to provide a [YAML specification](https://github.com/opensearch-project/OpenSearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.
+This testing suite uses that specification to generate the test for the specified version of OpenSearch on the fly. ## Run Run the testing suite is very easy, you just need to run the preconfigured npm script: @@ -21,8 +15,8 @@ Run the testing suite is very easy, you just need to run the preconfigured npm s npm run test:integration ``` -The first time you run this command, the OpenSearchrepository will be cloned inside the integration test folder, to be able to access the YAML specification, so it might take some time *(luckily, only the first time)*.
-Once the OpenSearchrepository has been cloned, the testing suite will connect to the provided OpenSearchinstance and then checkout the build hash in the repository. Finally, it will start running every test. +The first time you run this command, the OpenSearch repository will be cloned inside the integration test folder, to be able to access the YAML specification, so it might take some time *(luckily, only the first time)*.
+Once the OpenSearch repository has been cloned, the testing suite will connect to the provided OpenSearch instance and then checkout the build hash in the repository. Finally, it will start running every test. The specification does not allow the test to be run in parallel, so it might take a while to run the entire testing suite; on my machine, `MacBookPro15,2 core i7 2.7GHz 16GB of RAM` it takes around four minutes. diff --git a/test/integration/helpers/bulk.test.js b/test/integration/helpers/bulk.test.js index 665ddd9a5..e2489216f 100644 --- a/test/integration/helpers/bulk.test.js +++ b/test/integration/helpers/bulk.test.js @@ -40,7 +40,7 @@ const { Client } = require('../../../') const datasetPath = join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson') const INDEX = `test-helpers-${process.pid}` const client = new Client({ - node: process.env.TEST_ES_SERVER || 'http://localhost:9200' + node: process.env.TEST_OPENSEARCH_SERVER || 'http://localhost:9200' }) beforeEach(async () => { diff --git a/test/integration/helpers/msearch.test.js b/test/integration/helpers/msearch.test.js index 34aac839c..95d6444c9 100644 --- a/test/integration/helpers/msearch.test.js +++ b/test/integration/helpers/msearch.test.js @@ -39,7 +39,7 @@ const { Client, errors } = require('../../../') const INDEX = `test-helpers-${process.pid}` const client = new Client({ - node: process.env.TEST_ES_SERVER || 'http://localhost:9200' + node: process.env.TEST_OPENSEARCH_SERVER || 'http://localhost:9200' }) beforeEach(async () => { diff --git a/test/integration/helpers/scroll.test.js b/test/integration/helpers/scroll.test.js index 905ddf0dd..538cffebe 100644 --- a/test/integration/helpers/scroll.test.js +++ b/test/integration/helpers/scroll.test.js @@ -39,7 +39,7 @@ const { Client } = require('../../../') const INDEX = `test-helpers-${process.pid}` const client = new Client({ - node: process.env.TEST_ES_SERVER || 'http://localhost:9200' + node: process.env.TEST_OPENSEARCH_SERVER || 
'http://localhost:9200' }) beforeEach(async () => { diff --git a/test/integration/helpers/search.test.js b/test/integration/helpers/search.test.js index 8a89f949b..de6a356aa 100644 --- a/test/integration/helpers/search.test.js +++ b/test/integration/helpers/search.test.js @@ -39,7 +39,7 @@ const { Client } = require('../../../') const INDEX = `test-helpers-${process.pid}` const client = new Client({ - node: process.env.TEST_ES_SERVER || 'http://localhost:9200' + node: process.env.TEST_OPENSEARCH_SERVER || 'http://localhost:9200' }) beforeEach(async () => { diff --git a/test/integration/index.js b/test/integration/index.js index a0c01fc39..10de596cd 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -55,7 +55,7 @@ const ossSkips = { 'cat.indices/10_basic.yml': ['Test cat indices output for closed index (pre 7.2.0)'], 'cluster.health/10_basic.yml': ['cluster health with closed index (pre 7.2.0)'], // TODO: remove this once 'arbitrary_key' is implemented - // https://github.com/opensearch-project/opensearch/pull/41492 + // https://github.com/elastic/elasticsearch/pull/41492 'indices.split/30_copy_settings.yml': ['*'], 'indices.stats/50_disk_usage.yml': ['Disk usage stats'], 'indices.stats/60_field_usage.yml': ['Field usage stats'], @@ -264,7 +264,7 @@ function generateJunitXmlReport (junit, suite) { } if (require.main === module) { - const node = process.env.TEST_ES_SERVER || 'http://localhost:9200' + const node = process.env.TEST_OPENSEARCH_SERVER || 'http://localhost:9200' const opts = { node } diff --git a/test/types/new-types.test-d.ts b/test/types/new-types.test-d.ts index ffc6d3243..2a435d1d4 100644 --- a/test/types/new-types.test-d.ts +++ b/test/types/new-types.test-d.ts @@ -29,7 +29,7 @@ */ import { expectType, expectNotType, expectError } from 'tsd' -import { Client, RequestEvent, ResurrectEvent, ApiError, ApiResponse, ostypes } from '../../' +import { Client, RequestEvent, ResurrectEvent, ApiError, ApiResponse, opensearchtypes } from 
'../../' import type { Client as NewTypes } from '../../api/new' import { TransportRequestPromise, Context } from '../../lib/Transport' @@ -62,7 +62,7 @@ client.on('resurrect', (err, meta) => { { const response = await client.cat.count({ index: 'test' }) - expectType(response.body) + expectType(response.body) expectType(response.meta.context) } @@ -70,34 +70,34 @@ client.on('resurrect', (err, meta) => { { const response = await client.cat.count({ index: 'test' }) - expectType(response.body) + expectType(response.body) expectType(response.meta.context) } // Check API returned type and optional parameters { const promise = client.info() - expectType>>(promise) + expectType>>(promise) promise - .then(result => expectType>(result)) + .then(result => expectType>(result)) .catch((err: ApiError) => expectType(err)) expectType(promise.abort()) } { const promise = client.info({ pretty: true }) - expectType>>(promise) + expectType>>(promise) promise - .then(result => expectType>(result)) + .then(result => expectType>(result)) .catch((err: ApiError) => expectType(err)) expectType(promise.abort()) } { const promise = client.info({ pretty: true }, { ignore: [404] }) - expectType>>(promise) + expectType>>(promise) promise - .then(result => expectType>(result)) + .then(result => expectType>(result)) .catch((err: ApiError) => expectType(err)) expectType(promise.abort()) } diff --git a/test/unit/connection.test.js b/test/unit/connection.test.js index 67cb1a197..7dfcd7719 100644 --- a/test/unit/connection.test.js +++ b/test/unit/connection.test.js @@ -755,7 +755,7 @@ test('connection.toJSON should hide agent, ssl and auth', t => { }) }) -// https://github.com/opensearch-project/opensearch-js/issues/843 +// https://github.com/elastic/elasticsearch-js/issues/843 test('Port handling', t => { t.test('http 80', t => { const connection = new Connection({ From 06a4cde9240e68aa3700047be3d12219fa4b0c4a Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Thu, 19 Aug 2021 19:22:00 +0000 
Subject: [PATCH 04/10] Change OSAPI to OpenSearchAPI Signed-off-by: Bishoy Boktor --- api/index.js | 84 +++++++++++++++--------------- index.js | 4 +- scripts/utils/generateMain.js | 8 +-- test/fixtures/stackoverflow.ndjson | 2 +- 4 files changed, 49 insertions(+), 49 deletions(-) diff --git a/api/index.js b/api/index.js index 0f09e605a..0df211169 100644 --- a/api/index.js +++ b/api/index.js @@ -92,7 +92,7 @@ const kShutdown = Symbol('Shutdown') const kSnapshot = Symbol('Snapshot') const kTasks = Symbol('Tasks') -function OSAPI (opts) { +function OpenSearchAPI (opts) { this[kConfigurationError] = opts.ConfigurationError this[kCat] = null this[kCluster] = null @@ -106,47 +106,47 @@ function OSAPI (opts) { this[kTasks] = null } -OSAPI.prototype.bulk = bulkApi -OSAPI.prototype.clearScroll = clearScrollApi -OSAPI.prototype.count = countApi -OSAPI.prototype.create = createApi -OSAPI.prototype.delete = deleteApi -OSAPI.prototype.deleteByQuery = deleteByQueryApi -OSAPI.prototype.deleteByQueryRethrottle = deleteByQueryRethrottleApi -OSAPI.prototype.deleteScript = deleteScriptApi -OSAPI.prototype.exists = existsApi -OSAPI.prototype.existsSource = existsSourceApi -OSAPI.prototype.explain = explainApi -OSAPI.prototype.fieldCaps = fieldCapsApi -OSAPI.prototype.get = getApi -OSAPI.prototype.getScript = getScriptApi -OSAPI.prototype.getScriptContext = getScriptContextApi -OSAPI.prototype.getScriptLanguages = getScriptLanguagesApi -OSAPI.prototype.getSource = getSourceApi -OSAPI.prototype.index = indexApi -OSAPI.prototype.info = infoApi -OSAPI.prototype.mget = mgetApi -OSAPI.prototype.msearch = msearchApi -OSAPI.prototype.msearchTemplate = msearchTemplateApi -OSAPI.prototype.mtermvectors = mtermvectorsApi -OSAPI.prototype.ping = pingApi -OSAPI.prototype.putScript = putScriptApi -OSAPI.prototype.rankEval = rankEvalApi -OSAPI.prototype.reindex = reindexApi -OSAPI.prototype.reindexRethrottle = reindexRethrottleApi -OSAPI.prototype.renderSearchTemplate = renderSearchTemplateApi 
-OSAPI.prototype.scriptsPainlessExecute = scriptsPainlessExecuteApi -OSAPI.prototype.scroll = scrollApi -OSAPI.prototype.search = searchApi -OSAPI.prototype.searchShards = searchShardsApi -OSAPI.prototype.searchTemplate = searchTemplateApi -OSAPI.prototype.termsEnum = termsEnumApi -OSAPI.prototype.termvectors = termvectorsApi -OSAPI.prototype.update = updateApi -OSAPI.prototype.updateByQuery = updateByQueryApi -OSAPI.prototype.updateByQueryRethrottle = updateByQueryRethrottleApi +OpenSearchAPI.prototype.bulk = bulkApi +OpenSearchAPI.prototype.clearScroll = clearScrollApi +OpenSearchAPI.prototype.count = countApi +OpenSearchAPI.prototype.create = createApi +OpenSearchAPI.prototype.delete = deleteApi +OpenSearchAPI.prototype.deleteByQuery = deleteByQueryApi +OpenSearchAPI.prototype.deleteByQueryRethrottle = deleteByQueryRethrottleApi +OpenSearchAPI.prototype.deleteScript = deleteScriptApi +OpenSearchAPI.prototype.exists = existsApi +OpenSearchAPI.prototype.existsSource = existsSourceApi +OpenSearchAPI.prototype.explain = explainApi +OpenSearchAPI.prototype.fieldCaps = fieldCapsApi +OpenSearchAPI.prototype.get = getApi +OpenSearchAPI.prototype.getScript = getScriptApi +OpenSearchAPI.prototype.getScriptContext = getScriptContextApi +OpenSearchAPI.prototype.getScriptLanguages = getScriptLanguagesApi +OpenSearchAPI.prototype.getSource = getSourceApi +OpenSearchAPI.prototype.index = indexApi +OpenSearchAPI.prototype.info = infoApi +OpenSearchAPI.prototype.mget = mgetApi +OpenSearchAPI.prototype.msearch = msearchApi +OpenSearchAPI.prototype.msearchTemplate = msearchTemplateApi +OpenSearchAPI.prototype.mtermvectors = mtermvectorsApi +OpenSearchAPI.prototype.ping = pingApi +OpenSearchAPI.prototype.putScript = putScriptApi +OpenSearchAPI.prototype.rankEval = rankEvalApi +OpenSearchAPI.prototype.reindex = reindexApi +OpenSearchAPI.prototype.reindexRethrottle = reindexRethrottleApi +OpenSearchAPI.prototype.renderSearchTemplate = renderSearchTemplateApi 
+OpenSearchAPI.prototype.scriptsPainlessExecute = scriptsPainlessExecuteApi +OpenSearchAPI.prototype.scroll = scrollApi +OpenSearchAPI.prototype.search = searchApi +OpenSearchAPI.prototype.searchShards = searchShardsApi +OpenSearchAPI.prototype.searchTemplate = searchTemplateApi +OpenSearchAPI.prototype.termsEnum = termsEnumApi +OpenSearchAPI.prototype.termvectors = termvectorsApi +OpenSearchAPI.prototype.update = updateApi +OpenSearchAPI.prototype.updateByQuery = updateByQueryApi +OpenSearchAPI.prototype.updateByQueryRethrottle = updateByQueryRethrottleApi -Object.defineProperties(OSAPI.prototype, { +Object.defineProperties(OpenSearchAPI.prototype, { cat: { get () { if (this[kCat] === null) { @@ -251,4 +251,4 @@ Object.defineProperties(OSAPI.prototype, { update_by_query_rethrottle: { get () { return this.updateByQueryRethrottle } } }) -module.exports = OSAPI +module.exports = OpenSearchAPI diff --git a/index.js b/index.js index 50a9ef246..e8c9c3234 100644 --- a/index.js +++ b/index.js @@ -53,9 +53,9 @@ const kChild = Symbol('opensearchjs-child') const kExtensions = Symbol('opensearchjs-extensions') const kEventEmitter = Symbol('opensearchjs-event-emitter') -const OSAPI = require('./api') +const OpenSearchAPI = require('./api') -class Client extends OSAPI { +class Client extends OpenSearchAPI { constructor (opts = {}) { super({ ConfigurationError }) if (opts.cloud && opts[kChild] === undefined) { diff --git a/scripts/utils/generateMain.js b/scripts/utils/generateMain.js index 82b1d8477..a734ecdc6 100644 --- a/scripts/utils/generateMain.js +++ b/scripts/utils/generateMain.js @@ -148,14 +148,14 @@ function genFactory (folder, specFolder, namespaces) { getters.push(`${namespace}: { get () { return this.${camelify(namespace)} } },\n`) } } else { - apisStr += `OSAPI.prototype.${camelify(namespace)} = ${camelify(namespace)}Api\n` + apisStr += `OpenSearchAPI.prototype.${camelify(namespace)} = ${camelify(namespace)}Api\n` if (namespace.includes('_')) { 
getters.push(`${namespace}: { get () { return this.${camelify(namespace)} } },\n`) } } } - apisStr += '\nObject.defineProperties(OSAPI.prototype, {\n' + apisStr += '\nObject.defineProperties(OpenSearchAPI.prototype, {\n' for (const getter of getters) { apisStr += getter } @@ -201,14 +201,14 @@ function genFactory (folder, specFolder, namespaces) { const { kConfigurationError } = require('./utils') ${symbols} - function OSAPI (opts) { + function OpenSearchAPI (opts) { this[kConfigurationError] = opts.ConfigurationError ${symbolsInstance} } ${apisStr} - module.exports = OSAPI + module.exports = OpenSearchAPI ` // new line at the end of file diff --git a/test/fixtures/stackoverflow.ndjson b/test/fixtures/stackoverflow.ndjson index eb1592d29..3615e86fa 100644 --- a/test/fixtures/stackoverflow.ndjson +++ b/test/fixtures/stackoverflow.ndjson @@ -4330,7 +4330,7 @@ {"id":"15563220","title":"Login In Storyboard","body":"\u003cp\u003eI have to implement login functionality to my application. The appropriate credentials like \"Username\" \u0026amp; \"Password\" are saved in an XML file. 
My requirement is to create an application using storyboard.\u003c/p\u003e\n\n\u003cp\u003eThe segue should have a conditional property and the next view should only be displayed when the correct credentials are entered.\u003c/p\u003e\n\n\u003cp\u003eIf i could get a tutorial to explain the same it would be great.\u003c/p\u003e","answer_count":"0","comment_count":"2","creation_date":"2013-03-22 05:17:09.337 UTC","last_activity_date":"2013-03-22 05:17:09.337 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2173317","post_type_id":"1","score":"0","tags":"xml|login|xcode-storyboard","view_count":"54"} {"id":"45885815","title":"Is there an automated way to see if AWS account has Premium Support Subscription","body":"\u003cp\u003eI have multiple AWS accounts and need an automated way (CLI or SDK) to find out if the account has a Premium Support Subscription.\u003c/p\u003e\n\n\u003cp\u003eEssentially I want to know whether I can use cloudwatch events from Trusted Advisor to trigger Lambda functions on a particular account.\u003c/p\u003e\n\n\u003cp\u003eOn the cli I can run:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eaws support \u0026lt;command\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eand will get an error if Premium Support isn't enabled, but is there a better way to find this out?\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-08-25 16:30:44.44 UTC","last_activity_date":"2017-08-25 16:46:13.763 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1706504","post_type_id":"1","score":"1","tags":"amazon-web-services|aws-sdk|aws-cli","view_count":"53"} {"id":"43622460","title":"PassportJS's serializeUser not setting session","body":"\u003cp\u003e** I just discovered that this works on Heroku, but not on my local machine **\u003c/p\u003e\n\n\u003cp\u003eAfter the user goes through the signup process I call authenticate, which is supposed to call login by default, but I put 
an extra call to login for good measure to set the session, and still the session isn't storing the \u003ccode\u003eserializeUser\u003c/code\u003e contents of \u003ccode\u003e_id\u003c/code\u003e. The end goal is to call \u003ccode\u003ereq.user\u003c/code\u003e to get the currently logged in user, but nothing is returned.\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cp\u003eI'm running my Angular2 app on \u003ccode\u003elocalhost:4200\u003c/code\u003e, NodeJS server on \u003ccode\u003elocalhost:8080\u003c/code\u003e, and my redis server from port \u003ccode\u003e6379\u003c/code\u003e.\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eThe user id to user vice versa in \u003ccode\u003eserializeUser\u003c/code\u003e and \u003ccode\u003edeserializeUser\u003c/code\u003e is correct.\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eMy \u003ccode\u003eserializeUser\u003c/code\u003e function is called, but my \u003ccode\u003edeserializeUser\u003c/code\u003e function isn't.\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003eMy redis server does work and is receiving session strings, but they are formatted incorrectly like below.\u003c/p\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eHere's my session, it's missing the \u003ccode\u003e_id\u003c/code\u003e (user id) data.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e{\"data\":{\"cookie\":{\"originalMaxAge\":300000,\"expires\":\"2017-04-25T22:43:32.605Z\",\"secure\":false,\"httpOnly\":true,\"path\":\"/\"}}}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eserver.js\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e...\nconst passport = require(\"passport\");\nconst LocalStrategy = require(\"passport-local\").Strategy;\nconst session = require(\"express-session\");\nvar RedisStore = require(\"connect-redis\")(session);\n\napp.use(function(req, res, next){\n res.header(\"Access-Control-Allow-Origin\", \"*\");\n res.header(\"Access-Control-Allow-Headers\", \"Origin, X-Requested-With, 
Content-Type, Accept\");\n res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');\n res.header('Access-Control-Allow-Credentials', \"true\");\n next();\n});\n\nvar options = {\n \"port\": nconf.get(\"redis:urlPort\"),\n \"host\": nconf.get(\"redis:urlURI\")\n};\n\napp.use(session({\n store: new RedisStore(options),\n cookie: {\n secure: false,\n maxAge: 300000\n },\n secret: 'starbucks-sucks',\n resave: true, \n saveUninitialized: true \n}));\n\napp.use(passport.initialize());\napp.use(passport.session());\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eserver.js: serializeUser \u0026amp; deserializeUser\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epassport.serializeUser(function(user, done) {\n done(null, user._id);\n});\n\npassport.deserializeUser(function(user_id, done) {\n db.collection(USERS_COLLECTION).findOne({_id: new ObjectID(user_id) }, function(err, doc){\n if(err){\n handleError(res, err.message, \"Failed to get user\");\n } else{\n done(null, doc);\n }\n });\n});\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eserver.js: other\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epassport.use(new LocalStrategy({\n usernameField: \"phone\",\n passwordField: \"auth_code\"\n},function(username, password, done) {\n db.collection(USERS_COLLECTION).findOne({\"phone\": username})\n .then(function(data){\n if(data.auth_code === password){ return done(null, data); }\n else{ done(null, false, {message: \"Verification code is incorrect.\"}); }\n }, function(err){\n done(err);\n });\n}));\n\napp.post(\"/login\", function(req, res, next) {\n passport.authenticate(\"local\", function (err, user, info) {\n if (err) { next(err); }\n if (!user){ next(err); }\n\n req.login(user, function(err){\n if(err){ return next(err); }\n res.status(200).json(user);\n });\n })(req, res, next);\n});\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"0","creation_date":"2017-04-25 23:14:13.467 UTC","last_activity_date":"2017-04-26 
03:07:17.7 UTC","last_edit_date":"2017-04-26 03:07:17.7 UTC","last_editor_display_name":"","last_editor_user_id":"924814","owner_display_name":"","owner_user_id":"924814","post_type_id":"1","score":"0","tags":"node.js|session|redis|passport.js|passport-local","view_count":"38"} -{"id":"24756748","title":"how to parse JSON data with the HTTP post method in android?","body":"\u003cp\u003eI want to parse JSON data as a string parameter to the web service. \u003c/p\u003e\n\n\u003cp\u003eMy class is mentioned below.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e@Override\nprotected String doInBackground(String... params) {\n // TODO Auto-generated method stub\n HttpClient httpclient = new DefaultHttpClient();\n //URL url = new URL(\"http://192.168.1.44:8080/api/BeVoPOSAPI/checklogin?nodeid=2\");\n //Log.d(\"shankar: \", ip+\":\"+port+\"/\"+node);\n //String url = \"http://\"+ip+\":\"+port+\"/api/BeVoPOSAPI/checklogin?nodeid=\"+node+\"\u0026amp;login=\";\n //String url = \"http://\"+ip+\":\"+port+\"/api/BeVoPOSAPI/checklogin?nodeid=\"+node+\"\u0026amp;login=\";\n //String url = \"http://192.168.1.60:8081/api/BeVoPOSAPI/checklogin?nodeid=2\u0026amp;login=\";\n String url = \"http://ipa.azurewebsites.net/pos/savecheck?nodeid=2\u0026amp;checkxml=\";\n try {\n // Add your data\n String checkxml = new String(params[0]);\n ;\n url = url.concat(checkxml);\n Log.d(\"password\", checkxml);\n //HttpPost httppost = new HttpPost(url);\n\n //HttpParams httpParameters = new BasicHttpParams();\n //HttpConnectionParams.setConnectionTimeout(httpParameters, 1000);\n //HttpConnectionParams.setSoTimeout(httpParameters, 1000);\n\n //HttpClient httpClient = new DefaultHttpClient(httpParameters);\n //HttpContext localContext = new BasicHttpContext();\n\n HttpPost httpget = new HttpPost(url);\n HttpParams httpParameters = new BasicHttpParams();\n // Set the timeout in milliseconds until a connection is established.\n // The default value is zero, that means the timeout is not used.\n int 
timeoutConnection = 300;\n HttpConnectionParams.setConnectionTimeout(httpParameters, timeoutConnection);\n // Set the default socket timeout (SO_TIMEOUT)\n // in milliseconds which is the timeout for waiting for data.\n int timeoutSocket = 500;\n HttpConnectionParams.setSoTimeout(httpParameters, timeoutSocket);\n /*List\u0026lt;NameValuePair\u0026gt; nameValuePairs = new ArrayList\u0026lt;NameValuePair\u0026gt;(1);\n Log.d(\"password\", password_check);\n nameValuePairs.add(new BasicNameValuePair(\"login\", password_check));\n httppost.setEntity(new UrlEncodedFormEntity(nameValuePairs));*/\n // Execute HTTP Post Request\n DefaultHttpClient httpClient = new DefaultHttpClient(httpParameters);\n httpClient.setParams(httpParameters);\n HttpResponse response = httpclient.execute(httpget);\n Log.d(\"Status\", response.toString());\n int responseCode = response.getStatusLine().getStatusCode();\n String str = Integer.toString(responseCode);\n Log.d(\"Responce code\", str);\n switch (responseCode) {\n case 200:\n HttpEntity entity = response.getEntity();\n if (entity != null) {\n String responseBody = EntityUtils.toString(entity);\n Log.d(\"Responce\", responseBody.toString());\n String jsonString = responseBody.toString();\n\n }\n break;\n }\n } catch (SocketTimeoutException e) {\n error = \"SocketTimeoutException\";\n } catch (ConnectTimeoutException e) {\n error = \"connectionTimeoutException\";\n } catch (ClientProtocolException e) {\n // TODO Auto-generated catch block\n //Log.d(\"Error\", e.toString());\n error = \"ClientProtocolException\";\n } catch (IOException e) {\n // TODO Auto-generated catch block\n //Log.d(\"Error\", e.toString());\n error = \"IOException\";\n }\n return null;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI parsed the checkxml string from another method.\u003c/p\u003e\n\n\u003cp\u003eThe checkxml consists of the details below as a string.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e {\n \"layoutid\": 1,\n \"total\": \"2.95\",\n 
\"checkdiscountpercentage\": 0,\n \"gratuityid\": \"\",\n \"status\": 141,\n \"checkdiscountshiftlevelid\": \"\",\n \"checktimeeventid\": \"\",\n \"isprintonbill\": \"\",\n \"userid\": 1,\n \"gratuitypercentage\": \"\",\n \"checkdiscountreason\": \"\",\n \"ordertype\": 210,\n \"noofcustomer\": 1,\n \"generatedon\": \"\",\n \"istaxexcemt\": 0,\n \"checkdefinitiontype\": \"\",\n \"tableid\": 1,\n \"customerid\": 0,\n \"ticket\": \"new\",\n \"checkdiscountamount\": \"0\",\n \"tablename\": 100,\n \"checkdiscountistaxadjust\": \"1\",\n \"checkdiscounttax\": \"0\",\n \"products\": [\n {\n \"menuitemname\": \"2\",\n \"menuitemid\": 1,\n \"reason\": \"\",\n \"discountpercentage\": 0,\n \"seatid\": 1,\n \"timeeventid\": \"\",\n \"SaleDetailsMenuItem_ID\": \"2\",\n \"istaxexcemt\": \"2\",\n \"taxamount\": \"0.2100\",\n \"discounttax\": \"0\",\n \"definitiontype\": \"\",\n \"modifiers\": [\n {}\n ],\n \"discountamount\": \"0\",\n \"istaxinclude\": \"2\",\n \"seatname\": \"\",\n \"shiftlevelid\": \"2\",\n \"discountshiftlevelid\": \"\",\n \"discountreason\": \"\",\n \"status\": \"2\",\n \"coursingid\": \"\",\n \"qty\": 2,\n \"ordertype\": \"\",\n \"taxpercent\": \"2\",\n \"taxids\": [\n {\n \"taxpercent\": \"7\",\n \"Amount\": \"0.21\",\n \"taxid\": \"1\"\n }\n ],\n \"holdtime\": 0,\n \"price\": 2.95,\n \"discountistaxadjust\": 1,\n \"price2\": 3\n }\n ]\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIt threw an \u003ccode\u003eillegal argument\u003c/code\u003e and \u003ccode\u003ethread pool exception\u003c/code\u003e. 
Please let me know how to parse this data as a parameter to the above url.\u003c/p\u003e","accepted_answer_id":"24756954","answer_count":"2","comment_count":"4","creation_date":"2014-07-15 11:14:17.667 UTC","favorite_count":"2","last_activity_date":"2015-05-09 10:26:50.693 UTC","last_edit_date":"2015-05-09 10:26:50.693 UTC","last_editor_display_name":"user3787700","last_editor_user_id":"325479","owner_display_name":"","owner_user_id":"2512822","post_type_id":"1","score":"1","tags":"java|android|json|http|httprequest","view_count":"7012"} +{"id":"24756748","title":"how to parse JSON data with the HTTP post method in android?","body":"\u003cp\u003eI want to parse JSON data as a string parameter to the web service. \u003c/p\u003e\n\n\u003cp\u003eMy class is mentioned below.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e@Override\nprotected String doInBackground(String... params) {\n // TODO Auto-generated method stub\n HttpClient httpclient = new DefaultHttpClient();\n //URL url = new URL(\"http://192.168.1.44:8080/api/BeVoPOSAPI/checklogin?nodeid=2\");\n //Log.d(\"shankar: \", ip+\":\"+port+\"/\"+node);\n //String url = \"http://\"+ip+\":\"+port+\"/api/BeVoPOSAPI/checklogin?nodeid=\"+node+\"\u0026amp;login=\";\n //String url = \"http://\"+ip+\":\"+port+\"/api/BeVoPOSAPI/checklogin?nodeid=\"+node+\"\u0026amp;login=\";\n //String url = \"http://192.168.1.60:8081/api/BeVoPOSAPI/checklogin?nodeid=2\u0026amp;login=\";\n String url = \"http://ipa.azurewebsites.net/pos/savecheck?nodeid=2\u0026amp;checkxml=\";\n try {\n // Add your data\n String checkxml = new String(params[0]);\n ;\n url = url.concat(checkxml);\n Log.d(\"password\", checkxml);\n //HttpPost httppost = new HttpPost(url);\n\n //HttpParams httpParameters = new BasicHttpParams();\n //HttpConnectionParams.setConnectionTimeout(httpParameters, 1000);\n //HttpConnectionParams.setSoTimeout(httpParameters, 1000);\n\n //HttpClient httpClient = new DefaultHttpClient(httpParameters);\n 
//HttpContext localContext = new BasicHttpContext();\n\n HttpPost httpget = new HttpPost(url);\n HttpParams httpParameters = new BasicHttpParams();\n // Set the timeout in milliseconds until a connection is established.\n // The default value is zero, that means the timeout is not used.\n int timeoutConnection = 300;\n HttpConnectionParams.setConnectionTimeout(httpParameters, timeoutConnection);\n // Set the default socket timeout (SO_TIMEOUT)\n // in milliseconds which is the timeout for waiting for data.\n int timeoutSocket = 500;\n HttpConnectionParams.setSoTimeout(httpParameters, timeoutSocket);\n /*List\u0026lt;NameValuePair\u0026gt; nameValuePairs = new ArrayList\u0026lt;NameValuePair\u0026gt;(1);\n Log.d(\"password\", password_check);\n nameValuePairs.add(new BasicNameValuePair(\"login\", password_check));\n httppost.setEntity(new UrlEncodedFormEntity(nameValuePairs));*/\n // Execute HTTP Post Request\n DefaultHttpClient httpClient = new DefaultHttpClient(httpParameters);\n httpClient.setParams(httpParameters);\n HttpResponse response = httpclient.execute(httpget);\n Log.d(\"Status\", response.toString());\n int responseCode = response.getStatusLine().getStatusCode();\n String str = Integer.toString(responseCode);\n Log.d(\"Responce code\", str);\n switch (responseCode) {\n case 200:\n HttpEntity entity = response.getEntity();\n if (entity != null) {\n String responseBody = EntityUtils.toString(entity);\n Log.d(\"Responce\", responseBody.toString());\n String jsonString = responseBody.toString();\n\n }\n break;\n }\n } catch (SocketTimeoutException e) {\n error = \"SocketTimeoutException\";\n } catch (ConnectTimeoutException e) {\n error = \"connectionTimeoutException\";\n } catch (ClientProtocolException e) {\n // TODO Auto-generated catch block\n //Log.d(\"Error\", e.toString());\n error = \"ClientProtocolException\";\n } catch (IOException e) {\n // TODO Auto-generated catch block\n //Log.d(\"Error\", e.toString());\n error = \"IOException\";\n }\n return 
null;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI parsed the checkxml string from another method.\u003c/p\u003e\n\n\u003cp\u003eThe checkxml consists of the details below as a string.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e {\n \"layoutid\": 1,\n \"total\": \"2.95\",\n \"checkdiscountpercentage\": 0,\n \"gratuityid\": \"\",\n \"status\": 141,\n \"checkdiscountshiftlevelid\": \"\",\n \"checktimeeventid\": \"\",\n \"isprintonbill\": \"\",\n \"userid\": 1,\n \"gratuitypercentage\": \"\",\n \"checkdiscountreason\": \"\",\n \"ordertype\": 210,\n \"noofcustomer\": 1,\n \"generatedon\": \"\",\n \"istaxexcemt\": 0,\n \"checkdefinitiontype\": \"\",\n \"tableid\": 1,\n \"customerid\": 0,\n \"ticket\": \"new\",\n \"checkdiscountamount\": \"0\",\n \"tablename\": 100,\n \"checkdiscountistaxadjust\": \"1\",\n \"checkdiscounttax\": \"0\",\n \"products\": [\n {\n \"menuitemname\": \"2\",\n \"menuitemid\": 1,\n \"reason\": \"\",\n \"discountpercentage\": 0,\n \"seatid\": 1,\n \"timeeventid\": \"\",\n \"SaleDetailsMenuItem_ID\": \"2\",\n \"istaxexcemt\": \"2\",\n \"taxamount\": \"0.2100\",\n \"discounttax\": \"0\",\n \"definitiontype\": \"\",\n \"modifiers\": [\n {}\n ],\n \"discountamount\": \"0\",\n \"istaxinclude\": \"2\",\n \"seatname\": \"\",\n \"shiftlevelid\": \"2\",\n \"discountshiftlevelid\": \"\",\n \"discountreason\": \"\",\n \"status\": \"2\",\n \"coursingid\": \"\",\n \"qty\": 2,\n \"ordertype\": \"\",\n \"taxpercent\": \"2\",\n \"taxids\": [\n {\n \"taxpercent\": \"7\",\n \"Amount\": \"0.21\",\n \"taxid\": \"1\"\n }\n ],\n \"holdtime\": 0,\n \"price\": 2.95,\n \"discountistaxadjust\": 1,\n \"price2\": 3\n }\n ]\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eIt threw an \u003ccode\u003eillegal argument\u003c/code\u003e and \u003ccode\u003ethread pool exception\u003c/code\u003e. 
Please let me know how to parse this data as a parameter to the above url.\u003c/p\u003e","accepted_answer_id":"24756954","answer_count":"2","comment_count":"4","creation_date":"2014-07-15 11:14:17.667 UTC","favorite_count":"2","last_activity_date":"2015-05-09 10:26:50.693 UTC","last_edit_date":"2015-05-09 10:26:50.693 UTC","last_editor_display_name":"user3787700","last_editor_user_id":"325479","owner_display_name":"","owner_user_id":"2512822","post_type_id":"1","score":"1","tags":"java|android|json|http|httprequest","view_count":"7012"} {"id":"39529166","title":"$setValidity is not working for my validation","body":"\u003cp\u003eI have a form called ganesConfig and based on some condition i want to show the error message.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;form method=\"post\" name=\"gamesConfig\" novalidate\u0026gt;\n \u0026lt;p ng-show=\"gamesConfig.selectedGames.$invalid.gamesduplicate\"\u0026gt;Already Exists. Please try another\u0026lt;/p\u0026gt;\n\u0026lt;/form\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ethe condition is as follows\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e$scope.gamesConfig.selectedGames.$setValidity(\"gamesduplicate\", false);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut not showing the error message.\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2016-09-16 10:25:27.927 UTC","last_activity_date":"2016-09-16 14:12:01.183 UTC","last_edit_date":"2016-09-16 10:49:55.087 UTC","last_editor_display_name":"","last_editor_user_id":"5471522","owner_display_name":"","owner_user_id":"5471522","post_type_id":"1","score":"0","tags":"angularjs","view_count":"354"} {"id":"47106087","title":"Bokeh simple bar chart","body":"\u003cp\u003eI'm new to the method. I created the following input, but it gives me an empty output. What did I miss? 
Thank you.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport pandas as pd\nfrom bokeh.charts import Bar\nimport pandas as pd\nfrom bokeh.plotting import figure, output_file, show\nmortality_age = pd.read_csv(\"mortality_by_age.csv\")\nx=mortality_age[\"Age Range\"]\ny=mortality_age[\"Deaths per 100,000 Live Births:\"]\nplot = figure(title=\"Example of a vertical bar chart\")\nplot.vbar(x, top=y, width=0.5,color=\"#CAB2D6\")\noutput_file(\"vertical_bar.html\", mode=\"inline\")\nshow(plot)\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"0","creation_date":"2017-11-04 00:21:35.757 UTC","last_activity_date":"2017-11-05 09:06:22.44 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"8678766","post_type_id":"1","score":"0","tags":"bokeh","view_count":"35"} {"id":"12424346","title":"Make a bouncing ListView in Android","body":"\u003cp\u003eI'm trying to make my listview \"bounce\". To explain myself, I want the ListView to have the same behavior as the iOs List View object. 
On the top and on the bottom of the list, the user can go over the list by swiping his finger.\u003c/p\u003e\n\n\u003cp\u003eThat behavior existed on Android 2.2 Samsung devices (Galaxy Tab GT1000 for instance).\u003c/p\u003e\n\n\u003cp\u003eOn the most devices I tested, the list is now acting different on scrolling, it displays a blue line that gets brighter when you swipe your finger moreover.\u003c/p\u003e\n\n\u003cp\u003eI found the BounceListView like this one :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epublic class BounceListView extends ListView\n{\n private static final int MAX_Y_OVERSCROLL_DISTANCE = 200;\n\n private Context mContext;\n private int mMaxYOverscrollDistance;\n\n public BounceListView(Context context) \n {\n super(context);\n mContext = context;\n initBounceListView();\n }\n\n public BounceListView(Context context, AttributeSet attrs) \n {\n super(context, attrs);\n mContext = context;\n initBounceListView();\n }\n\n public BounceListView(Context context, AttributeSet attrs, int defStyle) \n {\n super(context, attrs, defStyle);\n mContext = context;\n initBounceListView();\n }\n\n private void initBounceListView()\n {\n //get the density of the screen and do some maths with it on the max overscroll distance\n //variable so that you get similar behaviors no matter what the screen size\n\n final DisplayMetrics metrics = mContext.getResources().getDisplayMetrics();\n final float density = metrics.density;\n\n mMaxYOverscrollDistance = (int) (density * MAX_Y_OVERSCROLL_DISTANCE);\n }\n\n @Override\n protected boolean overScrollBy(int deltaX, int deltaY, int scrollX, int scrollY, int scrollRangeX, int scrollRangeY, int maxOverScrollX, int maxOverScrollY, boolean isTouchEvent) \n { \n //This is where the magic happens, we have replaced the incoming maxOverScrollY with our own custom variable mMaxYOverscrollDistance; \n return super.overScrollBy(deltaX, deltaY, scrollX, scrollY, scrollRangeX, scrollRangeY, maxOverScrollX, 
mMaxYOverscrollDistance, isTouchEvent); \n }\n\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut the problem of this ListView is that it doesn't go back to the first or to the last item after a scroll over the list... It stays on a position where list is not filled.\u003c/p\u003e\n\n\u003cp\u003eAnyone got an idea to make it work ?\u003c/p\u003e\n\n\u003cp\u003eThanks in advance!\u003c/p\u003e","answer_count":"1","comment_count":"1","creation_date":"2012-09-14 12:13:42.843 UTC","favorite_count":"2","last_activity_date":"2014-05-06 09:36:06.443 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"969881","post_type_id":"1","score":"4","tags":"android|listview","view_count":"6485"} From 177cfc1f60144cb732ead7d1ce4abea9f505b6fb Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Fri, 20 Aug 2021 16:44:01 +0000 Subject: [PATCH 05/10] Remove docs Signed-off-by: Bishoy Boktor --- docs/examples/asStream.asciidoc | 104 ---------- docs/examples/bulk.asciidoc | 92 --------- docs/examples/exists.asciidoc | 34 --- docs/examples/get.asciidoc | 34 --- docs/examples/ignore.asciidoc | 62 ------ docs/examples/index.asciidoc | 36 ---- docs/examples/msearch.asciidoc | 57 ------ docs/examples/proxy/.gitignore | 51 ----- docs/examples/proxy/README.md | 56 ----- docs/examples/proxy/api/autocomplete.js | 116 ----------- docs/examples/proxy/api/delete.js | 85 -------- docs/examples/proxy/api/index.js | 87 -------- docs/examples/proxy/api/search.js | 97 --------- docs/examples/proxy/package.json | 19 -- docs/examples/proxy/utils/authorize.js | 65 ------ .../proxy/utils/prepare-elasticsearch.js | 78 ------- docs/examples/proxy/vercel.json | 13 -- docs/examples/reindex.asciidoc | 75 ------- docs/examples/scroll.asciidoc | 193 ------------------ docs/examples/search.asciidoc | 63 ------ docs/examples/suggest.asciidoc | 66 ------ docs/examples/transport.request.asciidoc | 71 ------- docs/examples/typescript.asciidoc | 72 ------- 
docs/examples/update-by-query.asciidoc | 59 ------ docs/examples/update.asciidoc | 92 --------- docs/examples/update_by_query.asciidoc | 59 ------ 26 files changed, 1836 deletions(-) delete mode 100644 docs/examples/asStream.asciidoc delete mode 100644 docs/examples/bulk.asciidoc delete mode 100644 docs/examples/exists.asciidoc delete mode 100644 docs/examples/get.asciidoc delete mode 100644 docs/examples/ignore.asciidoc delete mode 100644 docs/examples/index.asciidoc delete mode 100644 docs/examples/msearch.asciidoc delete mode 100644 docs/examples/proxy/.gitignore delete mode 100644 docs/examples/proxy/README.md delete mode 100644 docs/examples/proxy/api/autocomplete.js delete mode 100644 docs/examples/proxy/api/delete.js delete mode 100644 docs/examples/proxy/api/index.js delete mode 100644 docs/examples/proxy/api/search.js delete mode 100644 docs/examples/proxy/package.json delete mode 100644 docs/examples/proxy/utils/authorize.js delete mode 100644 docs/examples/proxy/utils/prepare-elasticsearch.js delete mode 100644 docs/examples/proxy/vercel.json delete mode 100644 docs/examples/reindex.asciidoc delete mode 100644 docs/examples/scroll.asciidoc delete mode 100644 docs/examples/search.asciidoc delete mode 100644 docs/examples/suggest.asciidoc delete mode 100644 docs/examples/transport.request.asciidoc delete mode 100644 docs/examples/typescript.asciidoc delete mode 100644 docs/examples/update-by-query.asciidoc delete mode 100644 docs/examples/update.asciidoc delete mode 100644 docs/examples/update_by_query.asciidoc diff --git a/docs/examples/asStream.asciidoc b/docs/examples/asStream.asciidoc deleted file mode 100644 index 1333e4104..000000000 --- a/docs/examples/asStream.asciidoc +++ /dev/null @@ -1,104 +0,0 @@ -[[as_stream_examples]] -=== asStream - -Instead of getting the parsed body back, you will get the raw Node.js stream of -data. 
- -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - const { body: bulkResponse } = await client.bulk({ - refresh: true, - body: [ - // operation to perform - { index: { _index: 'game-of-thrones' } }, - // the document to index - { - character: 'Ned Stark', - quote: 'Winter is coming.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - ] - }) - - if (bulkResponse.errors) { - console.log(bulkResponse) - process.exit(1) - } - - // Let's search! - const { body } = await client.search({ - index: 'game-of-thrones', - body: { - query: { - match: { - quote: 'winter' - } - } - } - }, { - asStream: true - }) - - // stream async iteration, available in Node.js ≥ 10 - let payload = '' - body.setEncoding('utf8') - for await (const chunk of body) { - payload += chunk - } - console.log(JSON.parse(payload)) - - // classic stream callback style - let payload = '' - body.setEncoding('utf8') - body.on('data', chunk => { payload += chunk }) - body.on('error', console.log) - body.on('end', () => { - console.log(JSON.parse(payload)) - }) -} - -run().catch(console.log) ----- - -TIP: This can be useful if you need to pipe the {opensearch}'s response to a proxy, or -send it directly to another source. 
- -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) -const fastify = require('fastify')() - -fastify.post('/search/:index', async (req, reply) => { - const { body, statusCode, headers } = await client.search({ - index: req.params.index, - body: req.body - }, { - asStream: true - }) - - reply.code(statusCode).headers(headers) - return body -}) - -fastify.listen(3000) ----- diff --git a/docs/examples/bulk.asciidoc b/docs/examples/bulk.asciidoc deleted file mode 100644 index d9198baca..000000000 --- a/docs/examples/bulk.asciidoc +++ /dev/null @@ -1,92 +0,0 @@ -[[bulk_examples]] -=== Bulk - -The `bulk` API makes it possible to perform many index/delete operations in a -single API call. This can greatly increase the indexing speed. - -NOTE: Did you know that we provide an helper for sending bulk request? You can find it {jsclient}/client-helpers.html[here]. - -[source,js] ----- -'use strict' - -require('array.prototype.flatmap').shim() -const { Client } = require('@opensearch/opensearch') -const client = new Client({ - node: 'http://localhost:9200' -}) - -async function run () { - await client.indices.create({ - index: 'tweets', - body: { - mappings: { - properties: { - id: { type: 'integer' }, - text: { type: 'text' }, - user: { type: 'keyword' }, - time: { type: 'date' } - } - } - } - }, { ignore: [400] }) - - const dataset = [{ - id: 1, - text: 'If I fall, don\'t bring me back.', - user: 'jon', - date: new Date() - }, { - id: 2, - text: 'Winter is coming', - user: 'ned', - date: new Date() - }, { - id: 3, - text: 'A Lannister always pays his debts.', - user: 'tyrion', - date: new Date() - }, { - id: 4, - text: 'I am the blood of the dragon.', - user: 'daenerys', - date: new Date() - }, { - id: 5, // change this value to a string to see the bulk response with errors - text: 'A girl is Arya Stark of Winterfell. 
And I\'m going home.', - user: 'arya', - date: new Date() - }] - - const body = dataset.flatMap(doc => [{ index: { _index: 'tweets' } }, doc]) - - const { body: bulkResponse } = await client.bulk({ refresh: true, body }) - - if (bulkResponse.errors) { - const erroredDocuments = [] - // The items array has the same order of the dataset we just indexed. - // The presence of the `error` key indicates that the operation - // that we did for the document has failed. - bulkResponse.items.forEach((action, i) => { - const operation = Object.keys(action)[0] - if (action[operation].error) { - erroredDocuments.push({ - // If the status is 429 it means that you can retry the document, - // otherwise it's very likely a mapping error, and you should - // fix the document before to try it again. - status: action[operation].status, - error: action[operation].error, - operation: body[i * 2], - document: body[i * 2 + 1] - }) - } - }) - console.log(erroredDocuments) - } - - const { body: count } = await client.count({ index: 'tweets' }) - console.log(count) -} - -run().catch(console.log) ----- diff --git a/docs/examples/exists.asciidoc b/docs/examples/exists.asciidoc deleted file mode 100644 index 54f65e47b..000000000 --- a/docs/examples/exists.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ -[[exists_examples]] -=== Exists - -Check that the document `/game-of-thrones/1` exists. - -NOTE: Since this API uses the `HEAD` method, the body value will be boolean. - -[source,js] ---------- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - id: '1', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.' 
- } - }) - - const { body } = await client.exists({ - index: 'game-of-thrones', - id: 1 - }) - - console.log(body) // true -} - -run().catch(console.log) ---------- \ No newline at end of file diff --git a/docs/examples/get.asciidoc b/docs/examples/get.asciidoc deleted file mode 100644 index 3ec2f2a3d..000000000 --- a/docs/examples/get.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ -[[get_examples]] -=== Get - -The get API allows to get a typed JSON document from the index based on its id. -The following example gets a JSON document from an index called -`game-of-thrones`, under a type called `_doc`, with id valued `'1'`. - -[source,js] ---------- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - id: '1', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.' - } - }) - - const { body } = await client.get({ - index: 'game-of-thrones', - id: '1' - }) - - console.log(body) -} - -run().catch(console.log) ---------- \ No newline at end of file diff --git a/docs/examples/ignore.asciidoc b/docs/examples/ignore.asciidoc deleted file mode 100644 index 1f334cf97..000000000 --- a/docs/examples/ignore.asciidoc +++ /dev/null @@ -1,62 +0,0 @@ -[[ignore_examples]] -=== Ignore - -HTTP status codes which should not be considered errors for this request. - -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - const { body: bulkResponse } = await client.bulk({ - refresh: true, - body: [ - // operation to perform - { index: { _index: 'game-of-thrones' } }, - // the document to index - { - character: 'Ned Stark', - quote: 'Winter is coming.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' 
- }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - ] - }) - - if (bulkResponse.errors) { - console.log(bulkResponse) - process.exit(1) - } - - // Let's search! - const { body } = await client.search({ - index: 'game-of-thrones', - body: { - query: { - match: { - quote: 'fire' - } - } - } - }, { - ignore: [404] - }) - - console.log(body) // ResponseError -} - -run().catch(console.log) ----- \ No newline at end of file diff --git a/docs/examples/index.asciidoc b/docs/examples/index.asciidoc deleted file mode 100644 index 7aaf38f56..000000000 --- a/docs/examples/index.asciidoc +++ /dev/null @@ -1,36 +0,0 @@ -[[examples]] -== Examples - -Following you can find some examples on how to use the client. - -* Use of the <> parameter; -* Executing a <> request; -* Executing a <> request; -* Executing a <> request; -* Executing a <> request; -* Executing a <> request; -* Executing a <> request; -* Executing a <> request; -* Use of the <> parameter; -* Executing a <> request; -* How do I <>? -* Executing a <> request; -* I need <>; -* How to use the <> method; -* How to use <>; - -include::asStream.asciidoc[] -include::bulk.asciidoc[] -include::exists.asciidoc[] -include::get.asciidoc[] -include::ignore.asciidoc[] -include::msearch.asciidoc[] -include::scroll.asciidoc[] -include::search.asciidoc[] -include::suggest.asciidoc[] -include::transport.request.asciidoc[] -include::typescript.asciidoc[] -include::sql.query.asciidoc[] -include::update.asciidoc[] -include::update_by_query.asciidoc[] -include::reindex.asciidoc[] diff --git a/docs/examples/msearch.asciidoc b/docs/examples/msearch.asciidoc deleted file mode 100644 index 54973cb64..000000000 --- a/docs/examples/msearch.asciidoc +++ /dev/null @@ -1,57 +0,0 @@ -[[msearch_examples]] -=== MSearch - -The multi search API allows to execute several search requests within the same -API. 
- -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - const { body: bulkResponse } = await client.bulk({ - refresh: true, - body: [ - { index: { _index: 'game-of-thrones' } }, - { - character: 'Ned Stark', - quote: 'Winter is coming.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - ] - }) - - if (bulkResponse.errors) { - console.log(bulkResponse) - process.exit(1) - } - - const { body } = await client.msearch({ - body: [ - { index: 'game-of-thrones' }, - { query: { match: { character: 'Daenerys' } } }, - - { index: 'game-of-thrones' }, - { query: { match: { character: 'Tyrion' } } } - ] - }) - - console.log(body.responses) -} - -run().catch(console.log) ----- \ No newline at end of file diff --git a/docs/examples/proxy/.gitignore b/docs/examples/proxy/.gitignore deleted file mode 100644 index 12536433a..000000000 --- a/docs/examples/proxy/.gitignore +++ /dev/null @@ -1,51 +0,0 @@ -# Logs -logs -*.log -npm-debug.log* - -# Runtime data -pids -*.pid -*.seed - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul -coverage - -# coverage output -coverage.lcov - -# nyc test coverage -.nyc_output - -# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# node-waf configuration -.lock-wscript - -# Compiled binary addons (http://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -node_modules -jspm_packages - -# Optional npm cache directory -.npm - -# Optional REPL history -.node_repl_history - -# mac files -.DS_Store - -# vim swap files -*.swp - -#Jetbrains editor folder -.idea 
- -.vercel \ No newline at end of file diff --git a/docs/examples/proxy/README.md b/docs/examples/proxy/README.md deleted file mode 100644 index cfba3f195..000000000 --- a/docs/examples/proxy/README.md +++ /dev/null @@ -1,56 +0,0 @@ -# OpenSearch proxy example - -This folder contains an example of how to build a lightweight proxy -between your frontend code and OpenSearch if you don't -have a more sophisticated backend in place yet. - -> **IMPORTANT:** This is not a production ready code and it is only for demonstration purposes, -> we make no guarantees on it's security and stability. - -This project is designed to be deployed on [Vercel](https://vercel.com/), a cloud platform -for static sites and Serverless Functions. You can use other functions providers, -such as [Google Cloud functions](https://cloud.google.com/functions). - -## Project structure - -The project comes with four endpoints: - -- `/api/search`: runs a search, requires `'read'` permission -- `/api/autocomplete`: runs an autocomplete suggestion, requires `'read'` permission -- `/api/index`: indexes or updates a document, requires `'write'` permission -- `/api/delete`: deletes a document, requires `'write'` permission - -Inside `utils/authorize.js` you can find the authorization logic for the endpoints. -In each endpoint you should configure the `INDEX` variable. - -## How to use - -Create an account on Vercel, then create a deployment. - -### Configure OpenSearch - -Once you have created a deployment on opensearch Cloud copy the generated Cloud Id and the credentials. -Then open `utils/prepare-opensearch.js` and fill your credentials. The script generates -an Api Key that you can use for authenticating your request. Based on the configuration of the Api Key, you will be able -to perform different operation on the specified indices or index pattern. 
- -### Configure Vercel - -Install the [Vercel CLI](https://vercel.com/docs/cli) to bootstrap the project, -or read the [quickstart](https://vercel.com/docs) documentation. - - -## Authentication - -If you are using OpenSearch only for search purposes, such as a search box, you can create -an Api Key with `read` permissions and store it in your frontend app. Then you can send it -via `Authorization` header to the proxy and run your searches. - -If you need to ingest data as well, it's more secure to have a strong authentication in your application. -For such cases, use an external authentication service, such as [Auth0](https://auth0.com/) -or [Magic Link](https://magic.link/). Then create a different Api Key with `read` and `write` -permissions for authenticated users, that will not be stored in the frontend app. - -## License - -This software is licensed under the [Apache 2 license](../../LICENSE). diff --git a/docs/examples/proxy/api/autocomplete.js b/docs/examples/proxy/api/autocomplete.js deleted file mode 100644 index adeec17f6..000000000 --- a/docs/examples/proxy/api/autocomplete.js +++ /dev/null @@ -1,116 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// IMPORTANT: this is not a production ready code & purely for demonstration purposes, -// we make no guarantees on it's security and stability - -// NOTE: to make this endpoint work, you should create an ApiKey with 'read' permissions - -'use strict' - -const { Client } = require('@opensearch/opensearch') -const authorize = require('../utils/authorize') - -const INDEX = '' -const client = new Client({ - cloud: { - id: process.env.OPENSEARCH_CLOUD_ID - } -}) - -module.exports = async (req, res) => { - const [err, token] = authorize(req) - if (err) { - res.status(401) - res.json(err) - return - } - - if (typeof req.query.q !== 'string') { - res.status(400) - res.json({ - error: 'Bad Request', - message: 'Missing parameter "query.q"', - statusCode: 400 - }) - return - } - - if (req.query.q.length < 3) { - res.status(400) - res.json({ - error: 'Bad Request', - message: 'The length of "query.q" should be at least 3', - statusCode: 400 - }) - return - } - - try { - const response = await client.search({ - index: INDEX, - // You could directly send from the browser - // the OpenSearch's query DSL, but it will - // expose you to the risk that a malicious user - // could overload your cluster by crafting - // expensive queries. 
- body: { - _source: ['id', 'url', 'name'], // the fields you want to show in the autocompletion - size: 0, - // https://opensearch.org/docs/opensearch/ux/#autocomplete-queries - suggest: { - suggestions: { - prefix: req.query.q, - completion: { - field: 'suggest', - size: 5 - } - } - } - } - }, { - headers: { - Authorization: `ApiKey ${token}` - } - }) - - // It might be useful to configure http control caching headers - // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control - // res.setHeader('stale-while-revalidate', '30') - res.json(response.body) - } catch (err) { - res.status(err.statusCode || 500) - res.json({ - error: err.name, - message: err.message, - statusCode: err.statusCode || 500 - }) - } -} diff --git a/docs/examples/proxy/api/delete.js b/docs/examples/proxy/api/delete.js deleted file mode 100644 index 576f6ec3f..000000000 --- a/docs/examples/proxy/api/delete.js +++ /dev/null @@ -1,85 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// IMPORTANT: this is not a production ready code & purely for demonstration purposes, -// we make no guarantees on it's security and stability - -// NOTE: to make this endpoint work, you should create an ApiKey with 'write' permissions - -'use strict' - -const { Client } = require('@opensearch/opensearch') -const authorize = require('../utils/authorize') - -const INDEX = '' -const client = new Client({ - cloud: { - id: process.env.OPENSEARCH_CLOUD_ID - } -}) - -module.exports = async (req, res) => { - const [err, token] = authorize(req) - if (err) { - res.status(401) - res.json(err) - return - } - - if (typeof req.query.id !== 'string' && req.query.id.length === 0) { - res.status(400) - res.json({ - error: 'Bad Request', - message: 'Missing document id', - statusCode: 400 - }) - return - } - - try { - const response = await client.delete({ - index: INDEX, - id: req.query.id - }, { - headers: { - Authorization: `ApiKey ${token}` - } - }) - - res.json(response.body) - } catch (err) { - res.status(err.statusCode || 500) - res.json({ - error: err.name, - message: err.message, - statusCode: err.statusCode || 500 - }) - } -} diff --git a/docs/examples/proxy/api/index.js b/docs/examples/proxy/api/index.js deleted file mode 100644 index 65498b8ae..000000000 --- a/docs/examples/proxy/api/index.js +++ /dev/null @@ -1,87 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// IMPORTANT: this is not a production ready code & purely for demonstration purposes, -// we make no guarantees on it's security and stability - -// NOTE: to make this endpoint work, you should create an ApiKey with 'write' permissions - -'use strict' - -const { Client } = require('@opensearch/opensearch') -const authorize = require('../utils/authorize') - -const INDEX = '' -const client = new Client({ - cloud: { - id: process.env.OPENSEARCH_CLOUD_ID - } -}) - -module.exports = async (req, res) => { - const [err, token] = authorize(req) - if (err) { - res.status(401) - res.json(err) - return - } - - if (typeof req.body !== 'object') { - res.status(400) - res.json({ - error: 'Bad Request', - message: 'The document should be an object', - statusCode: 400 - }) - return - } - - try { - const response = await client.index({ - index: INDEX, - id: req.query.id, - body: req.body - }, { - headers: { - Authorization: `ApiKey ${token}` - } - }) - - res.status(response.statusCode) - res.json(response.body) - } catch (err) { - res.status(err.statusCode || 500) - res.json({ - error: err.name, - message: err.message, - statusCode: err.statusCode || 500 - }) - } -} diff --git a/docs/examples/proxy/api/search.js b/docs/examples/proxy/api/search.js deleted file mode 100644 index 71c42700d..000000000 --- a/docs/examples/proxy/api/search.js +++ /dev/null @@ -1,97 +0,0 @@ -/* - * SPDX-License-Identifier: 
Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -// IMPORTANT: this is not a production ready code & purely for demonstration purposes, -// we make no guarantees on it's security and stability - -// NOTE: to make this endpoint work, you should create an ApiKey with 'read' permissions - -'use strict' - -const { Client } = require('@opensearch/opensearch') -const authorize = require('../utils/authorize') - -const INDEX = '' -const client = new Client({ - cloud: { - id: process.env.OPENSEARCH_CLOUD_ID - } -}) - -module.exports = async (req, res) => { - const [err, token] = authorize(req) - if (err) { - res.status(401) - res.json(err) - return - } - - if (typeof req.body.text !== 'string') { - res.status(400) - res.json({ - error: 'Bad Request', - message: 'Missing parameter "body.text"', - statusCode: 400 - }) - return - } - - try { - const response = await client.search({ - index: INDEX, - // You could directly send from the browser - // the OpenSearch's query DSL, but it will - // expose you to the risk that a malicious user - // could overload your cluster by crafting - // expensive queries. 
- body: { - query: { - match: { field: req.body.text } - } - } - }, { - headers: { - Authorization: `ApiKey ${token}` - } - }) - - // It might be useful to configure http control caching headers - // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control - // res.setHeader('stale-while-revalidate', '30') - res.json(response.body) - } catch (err) { - res.status(err.statusCode || 500) - res.json({ - error: err.name, - message: err.message, - statusCode: err.statusCode || 500 - }) - } -} diff --git a/docs/examples/proxy/package.json b/docs/examples/proxy/package.json deleted file mode 100644 index 83293c5d8..000000000 --- a/docs/examples/proxy/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "proxy-example", - "version": "1.0.0", - "private": true, - "description": "", - "main": "index.js", - "scripts": { - "test": "standard" - }, - "keywords": [], - "author": "Tomas Della Vedova", - "license": "Apache-2.0", - "dependencies": { - "@opensearch/opensearch": "^7.10.0" - }, - "devDependencies": { - "standard": "^16.0.3" - } -} diff --git a/docs/examples/proxy/utils/authorize.js b/docs/examples/proxy/utils/authorize.js deleted file mode 100644 index e0019c192..000000000 --- a/docs/examples/proxy/utils/authorize.js +++ /dev/null @@ -1,65 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// IMPORTANT: this is not a production ready code & purely for demonstration purposes, -// we make no guarantees on it's security and stability - -'use strict' - -module.exports = (req) => { - const auth = req.headers.authorization - if (typeof auth !== 'string') { - return [{ - error: 'Unauthorized', - message: 'Missing authorization header', - statusCode: 401 - }, null] - } - - const [type, token] = req.headers.authorization.split(' ') - - if (type !== 'Bearer') { - return [{ - error: 'Unauthorized', - message: 'Bad authorization type', - statusCode: 401 - }, null] - } - - if (token.length === 0) { - return [{ - error: 'Unauthorized', - message: 'Bad authorization token', - statusCode: 401 - }, null] - } - - return [null, token] -} diff --git a/docs/examples/proxy/utils/prepare-elasticsearch.js b/docs/examples/proxy/utils/prepare-elasticsearch.js deleted file mode 100644 index 0e459fe16..000000000 --- a/docs/examples/proxy/utils/prepare-elasticsearch.js +++ /dev/null @@ -1,78 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. 
licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -'use strict' - -const { Client } = require('@opensearch/opensearch') - -// Your Cloud Id -const cloudId = '' -// Your admin username -const username = '' -// Your admin password -const password = '' -// The indices or index patterns you will need to access -const indexNames = ['my-index-name-or-pattern'] -const privileges = ['read'] - -async function generateApiKeys (opts) { - const client = new Client({ - cloud: { - id: cloudId - }, - auth: { - username, - password - } - }) - - const { body } = await client.security.createApiKey({ - body: { - name: 'opensearch-proxy', - role_descriptors: { - 'opensearch-proxy-users': { - index: [{ - names: indexNames, - privileges - }] - } - } - } - }) - - return Buffer.from(`${body.id}:${body.api_key}`).toString('base64') -} - -generateApiKeys() - .then(console.log) - .catch(err => { - console.error(err) - process.exit(1) - }) diff --git a/docs/examples/proxy/vercel.json b/docs/examples/proxy/vercel.json deleted file mode 100644 index d1615c120..000000000 --- a/docs/examples/proxy/vercel.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "headers": [ - { - "source": "/api/(.*)", - "headers": [ - { "key": "Access-Control-Allow-Credentials", "value": "true" }, - { "key": "Access-Control-Allow-Origin", "value": "*" }, - { "key": "Access-Control-Allow-Methods", "value": "GET,OPTIONS,PATCH,DELETE,POST,PUT" }, - { "key": "Access-Control-Allow-Headers", "value": 
"X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version" } - ] - } - ] -} diff --git a/docs/examples/reindex.asciidoc b/docs/examples/reindex.asciidoc deleted file mode 100644 index f82451e82..000000000 --- a/docs/examples/reindex.asciidoc +++ /dev/null @@ -1,75 +0,0 @@ -[[reindex_examples]] -=== Reindex - -The `reindex` API extracts the document source from the source index and indexes the documents into the destination index. You can copy all documents to the destination index, reindex a subset of the documents or update the source before to reindex it. - -In the following example we have a `game-of-thrones` index which contains different quotes of various characters, we want to create a new index only for the house Stark and remove the `house` field from the document source. - -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.', - house: 'stark' - } - }) - - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Arya Stark', - quote: 'A girl is Arya Stark of Winterfell. 
And I\'m going home.', - house: 'stark' - } - }) - - await client.index({ - index: 'game-of-thrones', - refresh: true, - body: { - character: 'Tyrion Lannister', - quote: 'A Lannister always pays his debts.', - house: 'lannister' - } - }) - - await client.reindex({ - waitForCompletion: true, - refresh: true, - body: { - source: { - index: 'game-of-thrones', - query: { - match: { character: 'stark' } - } - }, - dest: { - index: 'stark-index' - }, - script: { - lang: 'painless', - source: 'ctx._source.remove("house")' - } - } - }) - - const { body } = await client.search({ - index: 'stark-index', - body: { - query: { match_all: {} } - } - }) - - console.log(body.hits.hits) -} - -run().catch(console.log) ----- diff --git a/docs/examples/scroll.asciidoc b/docs/examples/scroll.asciidoc deleted file mode 100644 index c4298cc1d..000000000 --- a/docs/examples/scroll.asciidoc +++ /dev/null @@ -1,193 +0,0 @@ -[[scroll_examples]] -=== Scroll - -While a search request returns a single “page” of results, the scroll API can be -used to retrieve large numbers of results (or even all results) from a single -search request, in much the same way as you would use a cursor on a traditional -database. - -Scrolling is not intended for real time user requests, but rather for processing -large amounts of data, e.g. in order to reindex the contents of one index into a -new index with a different configuration. - -NOTE: The results that are returned from a scroll request reflect the state of -the index at the time that the initial search request was made, like a snapshot -in time. Subsequent changes to documents (index, update or delete) will only -affect later search requests. - -In order to use scrolling, the initial search request should specify the scroll -parameter in the query string, which tells OpenSearch how long it should keep -the “search context” alive. - -NOTE: Did you know that we provide an helper for sending scroll requests? 
You can find it {jsclient}/client-helpers.html[here]. - -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - const allQuotes = [] - const responseQueue = [] - - // Let's index some data! - const { body: bulkResponse } = await client.bulk({ - // here we are forcing an index refresh, - // otherwise we will not get any result - // in the consequent search - refresh: true, - body: [ - // operation to perform - { index: { _index: 'game-of-thrones' } }, - // the document to index - { - character: 'Ned Stark', - quote: 'Winter is coming.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - ] - }) - - if (bulkResponse.errors) { - console.log(bulkResponse) - process.exit(1) - } - - // start things off by searching, setting a scroll timeout, and pushing - // our first response into the queue to be processed - const response = await client.search({ - index: 'game-of-thrones', - // keep the search results "scrollable" for 30 seconds - scroll: '30s', - // for the sake of this example, we will get only one result per search - size: 1, - // filter the source to only include the quote field - _source: ['quote'], - body: { - query: { - match_all: {} - } - } - }) - - responseQueue.push(response) - - while (responseQueue.length) { - const { body } = responseQueue.shift() - - // collect the titles from this response - body.hits.hits.forEach(function (hit) { - allQuotes.push(hit._source.quote) - }) - - // check to see if we have collected all of the quotes - if (body.hits.total.value === allQuotes.length) { - console.log('Every quote', allQuotes) - break - } - - // get the next response if there are more quotes to fetch - 
responseQueue.push( - await client.scroll({ - scrollId: body._scroll_id, - scroll: '30s' - }) - ) - } -} - -run().catch(console.log) ----- - -Another cool usage of the `scroll` API can be done with Node.js ≥ 10, by using -async iteration! - -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -// Scroll utility -async function * scrollSearch (params) { - let response = await client.search(params) - - while (true) { - const sourceHits = response.body.hits.hits - - if (sourceHits.length === 0) { - break - } - - for (const hit of sourceHits) { - yield hit - } - - if (!response.body._scroll_id) { - break - } - - response = await client.scroll({ - scrollId: response.body._scroll_id, - scroll: params.scroll - }) - } -} - -async function run () { - await client.bulk({ - refresh: true, - body: [ - { index: { _index: 'game-of-thrones' } }, - { - character: 'Ned Stark', - quote: 'Winter is coming.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - ] - }) - - const params = { - index: 'game-of-thrones', - scroll: '30s', - size: 1, - _source: ['quote'], - body: { - query: { - match_all: {} - } - } - } - - for await (const hit of scrollSearch(params)) { - console.log(hit._source) - } -} - -run().catch(console.log) ----- diff --git a/docs/examples/search.asciidoc b/docs/examples/search.asciidoc deleted file mode 100644 index 156fd1fdd..000000000 --- a/docs/examples/search.asciidoc +++ /dev/null @@ -1,63 +0,0 @@ -[[search_examples]] -=== Search - -The `search` API allows you to execute a search query and get back search hits -that match the query. 
The query can either be provided using a simple -https://www.opensearch.org[query string as a parameter], -or using a -https://www.opensearch.org[request body]. - -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - // Let's start by indexing some data - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.' - } - }) - - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - } - }) - - await client.index({ - index: 'game-of-thrones', - // here we are forcing an index refresh, - // otherwise we will not get any result - // in the consequent search - refresh: true, - body: { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - }) - - // Let's search! - const { body } = await client.search({ - index: 'game-of-thrones', - body: { - query: { - match: { - quote: 'winter' - } - } - } - }) - - console.log(body.hits.hits) -} - -run().catch(console.log) ----- \ No newline at end of file diff --git a/docs/examples/suggest.asciidoc b/docs/examples/suggest.asciidoc deleted file mode 100644 index d68fa1317..000000000 --- a/docs/examples/suggest.asciidoc +++ /dev/null @@ -1,66 +0,0 @@ -[[suggest_examples]] -=== Suggest - -The suggest feature suggests similar looking terms based on a provided text by -using a suggester. _Parts of the suggest feature are still under development._ - -The suggest request part is defined alongside the query part in a `search` -request. If the query part is left out, only suggestions are returned. 
- -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - const { body: bulkResponse } = await client.bulk({ - refresh: true, - body: [ - { index: { _index: 'game-of-thrones' } }, - { - character: 'Ned Stark', - quote: 'Winter is coming.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - ] - }) - - if (bulkResponse.errors) { - console.log(bulkResponse) - process.exit(1) - } - - const { body } = await client.search({ - index: 'game-of-thrones', - body: { - query: { - match: { quote: 'witner' } - }, - suggest: { - gotsuggest: { - text: 'witner', - term: { field: 'quote' } - } - } - } - }) - - console.log(body) -} - -run().catch(console.log) - ----- \ No newline at end of file diff --git a/docs/examples/transport.request.asciidoc b/docs/examples/transport.request.asciidoc deleted file mode 100644 index fa6805909..000000000 --- a/docs/examples/transport.request.asciidoc +++ /dev/null @@ -1,71 +0,0 @@ -[[transport_request_examples]] -=== transport.request - -It can happen that you need to communicate with {opensearch} by using an API that is not -supported by the client, to mitigate this issue you can directly call -`client.transport.request`, which is the internal utility that the client uses -to communicate with {opensearch} when you use an API method. - -NOTE: When using the `transport.request` method you must provide all the -parameters needed to perform an HTTP call, such as `method`, `path`, -`querystring`, and `body`. - - -TIP: If you find yourself use this method too often, take in consideration the -use of `client.extend`, which will make your code look cleaner and easier to -maintain. 
- -[source,js] ----- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - const { body: bulkResponse } = await client.bulk({ - refresh: true, - body: [ - { index: { _index: 'game-of-thrones' } }, - { - character: 'Ned Stark', - quote: 'Winter is coming.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - }, - - { index: { _index: 'game-of-thrones' } }, - { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - ] - }) - - if (bulkResponse.errors) { - console.log(bulkResponse) - process.exit(1) - } - - const { body } = await client.transport.request({ - method: 'POST', - path: '/game-of-thrones/_search', - body: { - query: { - match: { - quote: 'winter' - } - } - }, - querystring: {} - }) - - console.log(body) -} - -run().catch(console.log) ----- \ No newline at end of file diff --git a/docs/examples/typescript.asciidoc b/docs/examples/typescript.asciidoc deleted file mode 100644 index 249b12f59..000000000 --- a/docs/examples/typescript.asciidoc +++ /dev/null @@ -1,72 +0,0 @@ -[[typescript_examples]] -=== Typescript - -The client offers a first-class support for TypeScript, since it ships the type -definitions for every exposed API. - -NOTE: If you are using TypeScript you will be required to use _snake_case_ style -to define the API parameters instead of _camelCase_. - -[source,ts] ----- -'use strict' - -import { Client, ApiResponse, RequestParams } from '@opensearch/opensearch' -const client = new Client({ node: 'http://localhost:9200' }) - -async function run (): void { - // Let's start by indexing some data - const doc1: RequestParams.Index = { - index: 'game-of-thrones', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.' 
- } - } - await client.index(doc1) - - const doc2: RequestParams.Index = { - index: 'game-of-thrones', - body: { - character: 'Daenerys Targaryen', - quote: 'I am the blood of the dragon.' - } - } - await client.index(doc2) - - const doc3: RequestParams.Index = { - index: 'game-of-thrones', - // here we are forcing an index refresh, - // otherwise we will not get any result - // in the consequent search - refresh: true, - body: { - character: 'Tyrion Lannister', - quote: 'A mind needs books like a sword needs a whetstone.' - } - } - await client.index(doc3) - - // Let's search! - const params: RequestParams.Search = { - index: 'game-of-thrones', - body: { - query: { - match: { - quote: 'winter' - } - } - } - } - client - .search(params) - .then((result: ApiResponse) => { - console.log(result.body.hits.hits) - }) - .catch((err: Error) => { - console.log(err) - }) -} - -run() ----- \ No newline at end of file diff --git a/docs/examples/update-by-query.asciidoc b/docs/examples/update-by-query.asciidoc deleted file mode 100644 index 9f164dc1b..000000000 --- a/docs/examples/update-by-query.asciidoc +++ /dev/null @@ -1,59 +0,0 @@ -[[update_by_query_examples]] -== Update By Query - -The simplest usage of _update_by_query just performs an update on every document in the index without changing the source. This is useful to pick up a new property or some other online mapping change. - -[source,js] ---------- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.' - } - }) - - await client.index({ - index: 'game-of-thrones', - refresh: true, - body: { - character: 'Arya Stark', - quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.' 
- } - }) - - await client.updateByQuery({ - index: 'game-of-thrones', - refresh: true, - body: { - script: { - lang: 'painless', - source: 'ctx._source["house"] = "stark"' - }, - query: { - match: { - character: 'stark' - } - } - } - }) - - const { body } = await client.search({ - index: 'game-of-thrones', - body: { - query: { match_all: {} } - } - }) - - console.log(body.hits.hits) -} - -run().catch(console.log) - ---------- diff --git a/docs/examples/update.asciidoc b/docs/examples/update.asciidoc deleted file mode 100644 index bd4244ee6..000000000 --- a/docs/examples/update.asciidoc +++ /dev/null @@ -1,92 +0,0 @@ -[[update_examples]] -=== Update - -The update API allows updates of a specific document using the given script. + -In the following example, we will index a document that also tracks how many times a character has said the given quote, and then we will update the `times` field. - -[source,js] ---------- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - id: '1', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.', - times: 0 - } - }) - - await client.update({ - index: 'game-of-thrones', - id: '1', - body: { - script: { - lang: 'painless', - source: 'ctx._source.times++' - // you can also use parameters - // source: 'ctx._source.times += params.count', - // params: { count: 1 } - } - } - }) - - const { body } = await client.get({ - index: 'game-of-thrones', - id: '1' - }) - - console.log(body) -} - -run().catch(console.log) - ---------- - -With the update API, you can also run a partial update of a document. 
- -[source,js] ---------- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - id: '1', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.', - isAlive: true - } - }) - - await client.update({ - index: 'game-of-thrones', - id: '1', - body: { - doc: { - isAlive: false - } - } - }) - - const { body } = await client.get({ - index: 'game-of-thrones', - id: '1' - }) - - console.log(body) -} - -run().catch(console.log) - - ---------- \ No newline at end of file diff --git a/docs/examples/update_by_query.asciidoc b/docs/examples/update_by_query.asciidoc deleted file mode 100644 index 6eb217c2d..000000000 --- a/docs/examples/update_by_query.asciidoc +++ /dev/null @@ -1,59 +0,0 @@ -[[update_by_query_examples]] -=== Update By Query - -The simplest usage of _update_by_query just performs an update on every document in the index without changing the source. This is useful to pick up a new property or some other online mapping change. - -[source,js] ---------- -'use strict' - -const { Client } = require('@opensearch/opensearch') -const client = new Client({ node: 'http://localhost:9200' }) - -async function run () { - await client.index({ - index: 'game-of-thrones', - body: { - character: 'Ned Stark', - quote: 'Winter is coming.' - } - }) - - await client.index({ - index: 'game-of-thrones', - refresh: true, - body: { - character: 'Arya Stark', - quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.' 
- } - }) - - await client.updateByQuery({ - index: 'game-of-thrones', - refresh: true, - body: { - script: { - lang: 'painless', - source: 'ctx._source["house"] = "stark"' - }, - query: { - match: { - character: 'stark' - } - } - } - }) - - const { body } = await client.search({ - index: 'game-of-thrones', - body: { - query: { match_all: {} } - } - }) - - console.log(body.hits.hits) -} - -run().catch(console.log) - ---------- From 6d4e5294e49793f679de3b75be6cb750e103a6d2 Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Fri, 20 Aug 2021 17:09:24 +0000 Subject: [PATCH 06/10] Removed unused .ci file Signed-off-by: Bishoy Boktor --- .ci/run-opensearch.sh | 113 ------------------------------------------ 1 file changed, 113 deletions(-) delete mode 100755 .ci/run-opensearch.sh diff --git a/.ci/run-opensearch.sh b/.ci/run-opensearch.sh deleted file mode 100755 index 1138ede19..000000000 --- a/.ci/run-opensearch.sh +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env bash -# -# Launch one or more OpenSearch nodes via the Docker image, -# to form a cluster suitable for running the REST API tests. -# -# Export the STACK_VERSION variable, eg. '8.0.0-SNAPSHOT'. -# Export the TEST_SUITE variable. 
-# Export the NUMBER_OF_NODES variable to start more than 1 node - -# Version 1.4.0 -# - Initial version of the run-opensearch.sh script -# - Deleting the volume should not dependent on the container still running -# - Fixed `ES_JAVA_OPTS` config -# - Moved to STACK_VERSION and TEST_VERSION -# - Refactored into functions and imports -# - Support NUMBER_OF_NODES -# - Added 5 retries on docker pull for fixing transient network errors -# - Added flags to make local CCR configurations work -# - Added action.destructive_requires_name=false as the default will be true in v8 -# - Added ingest.geoip.downloader.enabled=false as it causes false positives in testing - -script_path=$(dirname $(realpath -s $0)) -source $script_path/functions/imports.sh -set -euo pipefail - -echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on seperate terminals \033[0m" -cleanup_node $opensearch_node_name - -master_node_name=${opensearch_node_name} -cluster_name=${moniker}${suffix} - -declare -a volumes -environment=($(cat <<-END - --env node.name=$opensearch_node_name - --env cluster.name=$cluster_name - --env cluster.initial_master_nodes=$master_node_name - --env discovery.seed_hosts=$master_node_name - --env cluster.routing.allocation.disk.threshold_enabled=false - --env bootstrap.memory_lock=true - --env node.attr.testattr=test - --env path.repo=/tmp - --env repositories.url.allowed_urls=http://snapshot.test* - --env action.destructive_requires_name=false - --env ingest.geoip.downloader.enabled=false -END -)) - volumes+=($(cat <<-END - --volume $ssl_cert:/usr/share/opensearch/config/certs/testnode.crt - --volume $ssl_key:/usr/share/opensearch/config/certs/testnode.key - --volume $ssl_ca:/usr/share/opensearch/config/certs/ca.crt -END -)) -fi - -cert_validation_flags="" - -# Pull the container, retry on failures up to 5 times with -# short delays between each attempt. Fixes most transient network errors. 
-docker_pull_attempts=0 -until [ "$docker_pull_attempts" -ge 5 ] -do - docker pull docker.opensearch.co/opensearch/"$opensearch_container" && break - docker_pull_attempts=$((docker_pull_attempts+1)) - echo "Failed to pull image, retrying in 10 seconds (retry $docker_pull_attempts/5)..." - sleep 10 -done - -NUMBER_OF_NODES=${NUMBER_OF_NODES-1} -http_port=9200 -for (( i=0; i<$NUMBER_OF_NODES; i++, http_port++ )); do - node_name=${opensearch_node_name}$i - node_url=${external_opensearch_url/9200/${http_port}}$i - if [[ "$i" == "0" ]]; then node_name=$opensearch_node_name; fi - environment+=($(cat <<-END - --env node.name=$node_name -END -)) - echo "$i: $http_port $node_url " - volume_name=${node_name}-${suffix}-data - volumes+=($(cat <<-END - --volume $volume_name:/usr/share/opensearch/data${i} -END -)) - - # make sure we detach for all but the last node if DETACH=false (default) so all nodes are started - local_detach="true" - if [[ "$i" == "$((NUMBER_OF_NODES-1))" ]]; then local_detach=$DETACH; fi - echo -e "\033[34;1mINFO:\033[0m Starting container $node_name \033[0m" - set -x - docker run \ - --name "$node_name" \ - --network "$network_name" \ - --env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \ - "${environment[@]}" \ - "${volumes[@]}" \ - --publish "$http_port":9200 \ - --ulimit nofile=65536:65536 \ - --ulimit memlock=-1:-1 \ - --detach="$local_detach" \ - --health-cmd="curl $cert_validation_flags --fail $opensearch_url/_cluster/health || exit 1" \ - --health-interval=2s \ - --health-retries=20 \ - --health-timeout=2s \ - --rm \ - docker.opensearch.co/opensearch/"$opensearch_container"; - - set +x - if wait_for_container "$opensearch_node_name" "$network_name"; then - echo -e "\033[32;1mSUCCESS:\033[0m Running on: $node_url\033[0m" - fi - -done - From 29194daa2e7bf62f4fedde64a0acc924555561fe Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Fri, 20 Aug 2021 19:29:44 +0000 Subject: [PATCH 07/10] Address PR comments Signed-off-by: Bishoy Boktor --- index.js | 3 +- 
lib/Transport.js | 2 +- package.json | 2 +- scripts/download-artifacts.js | 1 + test/benchmarks/suite.js | 2 +- test/unit/client.test.js | 131 ---------------------------------- 6 files changed, 5 insertions(+), 136 deletions(-) diff --git a/index.js b/index.js index e8c9c3234..6c137649a 100644 --- a/index.js +++ b/index.js @@ -183,8 +183,7 @@ class Client extends OpenSearchAPI { this.helpers = new Helpers({ client: this, - maxRetries: options.maxRetries, - metaHeader: null + maxRetries: options.maxRetries }) } diff --git a/lib/Transport.js b/lib/Transport.js index 75a781f4d..1d87dbeed 100644 --- a/lib/Transport.js +++ b/lib/Transport.js @@ -587,7 +587,7 @@ class Transport { // support odfe > v7 validation if (major !== 7) { - debug('Invalid OpenSearch distribution') + debug('Invalid distribution') return compatibleCheckEmitter.emit('compatible-check', false) } diff --git a/package.json b/package.json index 66f30cc85..5c492fdcb 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,7 @@ "./": "./" }, "homepage": "https://www.opensearch.org/", - "version": "7.14.0", + "version": "1.0.0", "versionCanary": "7.14.0-canary.6", "keywords": [ "opensearch", diff --git a/scripts/download-artifacts.js b/scripts/download-artifacts.js index ffa5569ad..e8094cafd 100644 --- a/scripts/download-artifacts.js +++ b/scripts/download-artifacts.js @@ -113,6 +113,7 @@ function loadInfo () { } async function resolve (version, hash) { + // TODO: fix artifacts url const response = await fetch(`https://artifacts-api.opensearch.co/v1/versions/${version}`) if (!response.ok) { throw new Error(`unexpected response ${response.statusText}`) diff --git a/test/benchmarks/suite.js b/test/benchmarks/suite.js index 6305bacbe..f10c7ffce 100644 --- a/test/benchmarks/suite.js +++ b/test/benchmarks/suite.js @@ -213,7 +213,7 @@ function buildBenchmark (options = {}) { }, agent: { version: clientVersion, - name: '@opensearch/opensearch-js', + name: '@opensearch-project/opensearch-js', git: { branch: 
branch.slice(0, -1), sha: commit.latest.hash, diff --git a/test/unit/client.test.js b/test/unit/client.test.js index 5b782d02f..075739d53 100644 --- a/test/unit/client.test.js +++ b/test/unit/client.test.js @@ -35,7 +35,6 @@ const { URL } = require('url') const buffer = require('buffer') const intoStream = require('into-stream') const { ConnectionPool, Transport, Connection, errors } = require('../../index') -const { CloudConnectionPool } = require('../../lib/pool') const { Client, buildServer } = require('../utils') let clientVersion = require('../../package.json').version if (clientVersion.includes('-')) { @@ -649,136 +648,6 @@ test('Extend client APIs', t => { t.end() }) -test('opensearch cloud config', t => { - t.test('Basic', t => { - t.plan(5) - const client = new Client({ - cloud: { - // 'localhost$abcd$efgh' - id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==', - username: 'opensearch', - password: 'changeme' - } - }) - - const pool = client.connectionPool - t.ok(pool instanceof CloudConnectionPool) - t.match(pool.connections.find(c => c.id === 'https://abcd.localhost/'), { - url: new URL('https://opensearch:changeme@abcd.localhost'), - id: 'https://abcd.localhost/', - headers: { - authorization: 'Basic ' + Buffer.from('opensearch:changeme').toString('base64') - }, - ssl: { secureProtocol: 'TLSv1_2_method' }, - deadCount: 0, - resurrectTimeout: 0, - roles: { - master: true, - data: true, - ingest: true - } - }) - - t.equal(client.transport.compression, 'gzip') - t.equal(client.transport.suggestCompression, true) - t.same(pool._ssl, { secureProtocol: 'TLSv1_2_method' }) - }) - - t.test('Without opensearchDashboards component', t => { - t.plan(5) - const client = new Client({ - cloud: { - // 'localhost$abcd$' - id: 'name:bG9jYWxob3N0JGFiY2Qk', - username: 'opensearch', - password: 'changeme' - } - }) - - const pool = client.connectionPool - t.ok(pool instanceof CloudConnectionPool) - t.match(pool.connections.find(c => c.id === 'https://abcd.localhost/'), { - url: new 
URL('https://opensearch:changeme@abcd.localhost'), - id: 'https://abcd.localhost/', - headers: { - authorization: 'Basic ' + Buffer.from('opensearch:changeme').toString('base64') - }, - ssl: { secureProtocol: 'TLSv1_2_method' }, - deadCount: 0, - resurrectTimeout: 0, - roles: { - master: true, - data: true, - ingest: true - } - }) - - t.equal(client.transport.compression, 'gzip') - t.equal(client.transport.suggestCompression, true) - t.same(pool._ssl, { secureProtocol: 'TLSv1_2_method' }) - }) - - t.test('Auth as separate option', t => { - t.plan(5) - const client = new Client({ - cloud: { - // 'localhost$abcd$efgh' - id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==' - }, - auth: { - username: 'opensearch', - password: 'changeme' - } - }) - - const pool = client.connectionPool - t.ok(pool instanceof CloudConnectionPool) - t.match(pool.connections.find(c => c.id === 'https://abcd.localhost/'), { - url: new URL('https://opensearch:changeme@abcd.localhost'), - id: 'https://abcd.localhost/', - headers: { - authorization: 'Basic ' + Buffer.from('opensearch:changeme').toString('base64') - }, - ssl: { secureProtocol: 'TLSv1_2_method' }, - deadCount: 0, - resurrectTimeout: 0, - roles: { - master: true, - data: true, - ingest: true - } - }) - - t.equal(client.transport.compression, 'gzip') - t.equal(client.transport.suggestCompression, true) - t.same(pool._ssl, { secureProtocol: 'TLSv1_2_method' }) - }) - - t.test('Override default options', t => { - t.plan(4) - const client = new Client({ - cloud: { - // 'localhost$abcd$efgh' - id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==', - username: 'opensearch', - password: 'changeme' - }, - compression: false, - suggestCompression: false, - ssl: { - secureProtocol: 'TLSv1_1_method' - } - }) - - t.ok(client.connectionPool instanceof CloudConnectionPool) - t.equal(client.transport.compression, false) - t.equal(client.transport.suggestCompression, false) - t.same(client.connectionPool._ssl, { secureProtocol: 'TLSv1_1_method' }) - }) - - t.end() -}) - 
test('Opaque Id support', t => { t.test('No opaqueId', t => { t.plan(3) From db9e3a0e4dd0b565fdf81d08206c81ca727458c8 Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Fri, 20 Aug 2021 19:43:36 +0000 Subject: [PATCH 08/10] Address PR comments Signed-off-by: Bishoy Boktor --- test/benchmarks/macro/complex.bench.js | 6 +++--- test/benchmarks/macro/simple.bench.js | 6 +++--- test/benchmarks/micro/basic.bench.js | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/test/benchmarks/macro/complex.bench.js b/test/benchmarks/macro/complex.bench.js index c2827d4d3..2ff68c27e 100644 --- a/test/benchmarks/macro/complex.bench.js +++ b/test/benchmarks/macro/complex.bench.js @@ -21,9 +21,9 @@ const { join } = require('path') const split = require('split2') const { bench, beforeEach, afterEach } = require('../suite')({ report: { - url: process.env.ES_RESULT_CLUSTER_URL, - username: process.env.ES_RESULT_CLUSTER_USERNAME, - password: process.env.ES_RESULT_CLUSTER_PASSWORD + url: process.env.OPENSEARCH_RESULT_CLUSTER_URL, + username: process.env.OPENSEARCH_RESULT_CLUSTER_USERNAME, + password: process.env.OPENSEARCH_RESULT_CLUSTER_PASSWORD } }) diff --git a/test/benchmarks/macro/simple.bench.js b/test/benchmarks/macro/simple.bench.js index 84f4c1f7d..efe24391d 100644 --- a/test/benchmarks/macro/simple.bench.js +++ b/test/benchmarks/macro/simple.bench.js @@ -16,9 +16,9 @@ const { statSync } = require('fs') const { join } = require('path') const { bench, beforeEach, afterEach } = require('../suite')({ report: { - url: process.env.ES_RESULT_CLUSTER_URL, - username: process.env.ES_RESULT_CLUSTER_USERNAME, - password: process.env.ES_RESULT_CLUSTER_PASSWORD + url: process.env.OPENSEARCH_RESULT_CLUSTER_URL, + username: process.env.OPENSEARCH_RESULT_CLUSTER_USERNAME, + password: process.env.OPENSEARCH_RESULT_CLUSTER_PASSWORD } }) diff --git a/test/benchmarks/micro/basic.bench.js b/test/benchmarks/micro/basic.bench.js index 3d6231e0f..0b5162591 100644 --- 
a/test/benchmarks/micro/basic.bench.js +++ b/test/benchmarks/micro/basic.bench.js @@ -13,9 +13,9 @@ const { bench } = require('../suite')({ report: { - url: process.env.ES_RESULT_CLUSTER_URL, - username: process.env.ES_RESULT_CLUSTER_USERNAME, - password: process.env.ES_RESULT_CLUSTER_PASSWORD + url: process.env.OPENSEARCH_RESULT_CLUSTER_URL, + username: process.env.OPENSEARCH_RESULT_CLUSTER_USERNAME, + password: process.env.OPENSEARCH_RESULT_CLUSTER_PASSWORD } }) const { Client } = require('../../../index') From 4e77960ccac507b9fb676e38a559a42447a8ff6a Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Fri, 20 Aug 2021 20:22:20 +0000 Subject: [PATCH 09/10] Remove compatibility check code and tests, and changed some minor references Signed-off-by: Bishoy Boktor --- lib/Serializer.js | 2 +- lib/Transport.js | 40 +- lib/errors.js | 2 +- scripts/utils/clone-opensearch.js | 2 +- scripts/utils/generateDocs.js | 2 +- test/acceptance/product-check.test.js | 858 -------------------------- test/fixtures/stackoverflow.ndjson | 6 +- test/integration/README.md | 4 +- 8 files changed, 14 insertions(+), 902 deletions(-) delete mode 100644 test/acceptance/product-check.test.js diff --git a/lib/Serializer.js b/lib/Serializer.js index 498640735..20046a935 100644 --- a/lib/Serializer.js +++ b/lib/Serializer.js @@ -91,7 +91,7 @@ class Serializer { const keys = Object.keys(object) for (let i = 0, len = keys.length; i < len; i++) { const key = keys[i] - // opensearch will complain for keys without a value + // OpenSearch will complain for keys without a value if (object[key] === undefined) { delete object[key] } else if (Array.isArray(object[key]) === true) { diff --git a/lib/Transport.js b/lib/Transport.js index 1d87dbeed..e4d50ca9c 100644 --- a/lib/Transport.js +++ b/lib/Transport.js @@ -41,8 +41,7 @@ const { RequestAbortedError, NoLivingConnectionsError, ResponseError, - ConfigurationError, - NotCompatibleError + ConfigurationError } = require('./errors') const noop = () => { } @@ 
-459,36 +458,7 @@ class Transport { } } - // still need to check the compatibility or waiting for the check to finish - if (this[kCompatibleCheck] === 0 || this[kCompatibleCheck] === 1) { - // let pass info requests - if (params.method === 'GET' && params.path === '/') { - prepareRequest() - } else { - // wait for compatible check to finish - compatibleCheckEmitter.once('compatible-check', status => { - if (status === false) { - const err = new NotCompatibleError(result) - this.emit('request', err, result) - process.nextTick(callback, err, result) - } else { - prepareRequest() - } - }) - // the very first request triggers the compatible check - if (this[kCompatibleCheck] === 0) { - this.checkCompatibleInfo() - } - } - // the compatible check is finished and it's not OpenSearch - } else if (this[kCompatibleCheck] === 3) { - const err = new NotCompatibleError(result) - this.emit('request', err, result) - process.nextTick(callback, err, result) - // the compatible check finished and it's OpenSearch - } else { - prepareRequest() - } + prepareRequest() return transportReturn } @@ -561,14 +531,14 @@ class Transport { debug('compatible check failed', err) if (err.statusCode === 401 || err.statusCode === 403) { this[kCompatibleCheck] = 2 - process.emitWarning('The client is unable to verify that the server is OpenSearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') + process.emitWarning('The client is unable to verify the distribution due to security privileges on the server side. 
Some functionality may not be compatible if the server is running an unsupported product.') compatibleCheckEmitter.emit('compatible-check', true) } else { this[kCompatibleCheck] = 0 compatibleCheckEmitter.emit('compatible-check', false) } } else { - debug('Checking opensearch version', result.body, result.headers) + debug('Checking OpenSearch version', result.body, result.headers) if (result.body.version == null || typeof result.body.version.number !== 'string') { debug('Can\'t access OpenSearch version') return compatibleCheckEmitter.emit('compatible-check', false) @@ -578,7 +548,7 @@ class Transport { const version = result.body.version.number.split('.') const major = Number(version[0]) - // support opensearch validation + // support OpenSearch validation if (distribution === 'opensearch') { debug('Valid OpenSearch distribution') this[kCompatibleCheck] = 2 diff --git a/lib/errors.js b/lib/errors.js index 7b10adab2..e1d57cf23 100644 --- a/lib/errors.js +++ b/lib/errors.js @@ -149,7 +149,7 @@ class NotCompatibleError extends OpenSearchClientError { super('Not Compatible Error') Error.captureStackTrace(this, NotCompatibleError) this.name = 'NotCompatibleError' - this.message = 'The client noticed that the server is not a supported distribution of OpenSearch' + this.message = 'The client noticed that the server is not a supported distribution' this.meta = meta } } diff --git a/scripts/utils/clone-opensearch.js b/scripts/utils/clone-opensearch.js index 6e52fa33b..f3ec4e2d1 100644 --- a/scripts/utils/clone-opensearch.js +++ b/scripts/utils/clone-opensearch.js @@ -43,7 +43,7 @@ function cloneAndCheckout (opts, callback) { withTag(tag, callback) /** - * Sets the opensearch repository to the given tag. + * Sets the OpenSearch repository to the given tag. * If the repository is not present in `opensearchFolder` it will * clone the repository and the checkout the tag. 
* If the repository is already present but it cannot checkout to diff --git a/scripts/utils/generateDocs.js b/scripts/utils/generateDocs.js index 0f68af3e5..64031a1d5 100644 --- a/scripts/utils/generateDocs.js +++ b/scripts/utils/generateDocs.js @@ -76,7 +76,7 @@ function generateDocs (common, spec) { == API Reference - This document contains the entire list of the OpenSearch API supported by the client, both OSS and commercial. The client is entirely licensed under Apache 2.0. + This document contains the entire list of the OpenSearch API supported by the client. The client is entirely licensed under Apache 2.0. OpenSearch exposes an HTTP layer to communicate with, and the client is a library that will help you do this. Because of this reason, you will see HTTP related parameters, such as ${'`'}body${'`'} or ${'`'}headers${'`'}. diff --git a/test/acceptance/product-check.test.js b/test/acceptance/product-check.test.js deleted file mode 100644 index 0b30b7fe7..000000000 --- a/test/acceptance/product-check.test.js +++ /dev/null @@ -1,858 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -'use strict' - -const { test } = require('tap') -const { Client } = require('../../') -const { - connection: { - MockConnectionTimeout, - buildMockConnection - } -} = require('../utils') - -test('No errors v7', t => { - t.plan(7) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '7.10.2', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - t.error(err) - const req = requests.shift() - t.equal(event.meta.request.params.method, req.method) - t.equal(event.meta.request.params.path, req.path) - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.error(err) - }) -}) - -test('Errors not v7', t => { - t.plan(3) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - 
version: { - number: '6.8.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - const req = requests.shift() - if (req.method === 'GET') { - t.error(err) - } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - } - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) -}) - -test('Support opensearch', t => { - t.plan(7) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '1.0.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - distribution: 'opensearch', - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '1.0.0', - minimum_index_compatibility_version: '1.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - t.error(err) - const req = requests.shift() - t.equal(event.meta.request.params.method, req.method) - 
t.equal(event.meta.request.params.path, req.path) - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.error(err) - }) -}) - -test('Auth error - 401', t => { - t.plan(8) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 401, - body: { - security: 'exception' - } - } - } - }) - - process.on('warning', onWarning) - function onWarning (warning) { - t.equal(warning.message, 'The client is unable to verify that the server is OpenSearch due to security privileges on the server side. Some functionality may not be compatible if the server is running an unsupported product.') - } - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - t.error(err) - const req = requests.shift() - t.equal(event.meta.request.params.method, req.method) - t.equal(event.meta.request.params.path, req.path) - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.statusCode, 401) - process.removeListener('warning', onWarning) - }) -}) - -test('Auth error - 403', t => { - t.plan(8) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 403, - body: { - security: 'exception' - } - } - } - }) - - process.on('warning', onWarning) - function onWarning (warning) { - t.equal(warning.message, 'The client is unable to verify that the server is OpenSearch due to security privileges on the server side. 
Some functionality may not be compatible if the server is running an unsupported product.') - } - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - t.error(err) - const req = requests.shift() - t.equal(event.meta.request.params.method, req.method) - t.equal(event.meta.request.params.path, req.path) - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.statusCode, 403) - process.removeListener('warning', onWarning) - }) -}) - -test('500 error', t => { - t.plan(8) - - let count = 0 - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const MockConnection = buildMockConnection({ - onRequest (params) { - const req = requests.shift() - t.equal(req.method, params.method) - t.equal(req.path, params.path) - - if (count++ >= 1) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '7.10.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } else { - return { - statusCode: 500, - body: { - error: 'kaboom' - } - } - } - } - }) - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - - client.search({ - index: 'foo', - body: { - 
query: { - match_all: {} - } - } - }, (err, result) => { - t.error(err) - }) - }) -}) - -test('TimeoutError', t => { - t.plan(3) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnectionTimeout, - maxRetries: 0 - }) - - client.on('request', (err, event) => { - const req = requests.shift() - if (req.method === 'GET') { - t.error(err) - } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - } - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) -}) - -test('Multiple subsequent calls, no errors', t => { - t.plan(15) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '7.10.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }, { - method: 'HEAD', - path: '/' - }, { - method: 'POST', - path: '/foo/_doc' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - t.error(err) - const req = requests.shift() - t.equal(event.meta.request.params.method, req.method) - t.equal(event.meta.request.params.path, req.path) - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } 
- } - }, (err, result) => { - t.error(err) - }) - - client.ping((err, result) => { - t.error(err) - }) - - client.index({ - index: 'foo', - body: { - foo: 'bar' - } - }, (err, result) => { - t.error(err) - }) -}) - -test('Multiple subsequent calls, with errors', t => { - t.plan(7) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '6.8.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }, { - method: 'HEAD', - path: '/' - }, { - method: 'POST', - path: '/foo/_doc' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - const req = requests.shift() - if (req.method === 'GET') { - t.error(err) - } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - } - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) - - client.ping((err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) - - client.index({ - index: 'foo', - body: { - foo: 'bar' - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) -}) - -test('Later successful call', t => { - t.plan(11) - const MockConnection = 
buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '7.10.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - t.error(err) - const req = requests.shift() - t.equal(event.meta.request.params.method, req.method) - t.equal(event.meta.request.params.path, req.path) - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.error(err) - }) - - setTimeout(() => { - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.error(err) - }) - }, 100) -}) - -test('Later errored call', t => { - t.plan(5) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '6.8.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }, { - method: 'POST', - path: '/foo/_search' - }] - 
- const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - const req = requests.shift() - if (req.method === 'GET') { - t.error(err) - } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - } - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) - - setTimeout(() => { - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) - }, 100) -}) - -test('Bad info response', t => { - t.plan(3) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA' - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - const req = requests.shift() - if (req.method === 'GET') { - t.error(err) - } else { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - } - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.equal(err.message, 'The client noticed that the server is not a supported distribution of OpenSearch') - }) -}) - -test('No multiple checks with child clients', t => { - t.plan(11) - const MockConnection = buildMockConnection({ - onRequest (params) { - return { - statusCode: 200, - body: { - name: '1ef419078577', - cluster_name: 'docker-cluster', - 
cluster_uuid: 'cQ5pAMvRRTyEzObH4L5mTA', - version: { - number: '7.10.0', - build_type: 'docker', - build_hash: '5fb4c050958a6b0b6a70a6fb3e616d0e390eaac3', - build_date: '2021-07-10T01:45:02.136546168Z', - build_snapshot: true, - lucene_version: '8.9.0', - minimum_wire_compatibility_version: '7.15.0', - minimum_index_compatibility_version: '7.0.0' - } - } - } - } - }) - - const requests = [{ - method: 'GET', - path: '/' - }, { - method: 'POST', - path: '/foo/_search' - }, { - method: 'POST', - path: '/foo/_search' - }] - - const client = new Client({ - node: 'http://localhost:9200', - Connection: MockConnection - }) - - client.on('request', (err, event) => { - t.error(err) - const req = requests.shift() - t.equal(event.meta.request.params.method, req.method) - t.equal(event.meta.request.params.path, req.path) - }) - - client.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.error(err) - }) - - setTimeout(() => { - const child = client.child() - child.search({ - index: 'foo', - body: { - query: { - match_all: {} - } - } - }, (err, result) => { - t.error(err) - }) - }, 100) -}) diff --git a/test/fixtures/stackoverflow.ndjson b/test/fixtures/stackoverflow.ndjson index 3615e86fa..154138b19 100644 --- a/test/fixtures/stackoverflow.ndjson +++ b/test/fixtures/stackoverflow.ndjson @@ -1618,7 +1618,7 @@ {"id":"7774806","title":"Illegal argument exception with Json","body":"\u003cp\u003eI get this error when running my code, and I don't receive anything back from json\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e10-15 00:29:22.396: WARN/System.err(562): java.lang.IllegalArgumentException: Illegal character in query at index 68: http://www.hotels-in-london-hotels.com/mytrolly/service.php?request={\"mode\":\"category\"}\n10-15 00:29:22.425: WARN/System.err(562): at java.net.URI.create(URI.java:970)\n10-15 00:29:22.425: WARN/System.err(562): at org.apache.http.client.methods.HttpGet.\u0026lt;init\u0026gt;(HttpGet.java:75)\n10-15 
00:29:22.436: WARN/System.err(562): at com.sampleapp.MainActivity$iTab.readTwitterFeed(MainActivity.java:128)\n10-15 00:29:22.436: WARN/System.err(562): at com.sampleapp.MainActivity$iTab.\u0026lt;init\u0026gt;(MainActivity.java:65)\n10-15 00:29:22.436: WARN/System.err(562): at java.lang.reflect.Constructor.constructNative(Native Method)\n10-15 00:29:22.446: WARN/System.err(562): at java.lang.reflect.Constructor.newInstance(Constructor.java:446)\n10-15 00:29:22.446: WARN/System.err(562): at android.view.LayoutInflater.createView(LayoutInflater.java:500)\n10-15 00:29:22.456: WARN/System.err(562): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:565)\n10-15 00:29:22.456: WARN/System.err(562): at android.view.LayoutInflater.rInflate(LayoutInflater.java:618)\n10-15 00:29:22.466: WARN/System.err(562): at android.view.LayoutInflater.inflate(LayoutInflater.java:407)\n10-15 00:29:22.466: WARN/System.err(562): at android.view.LayoutInflater.inflate(LayoutInflater.java:320)\n10-15 00:29:22.466: WARN/System.err(562): at android.view.LayoutInflater.inflate(LayoutInflater.java:276)\n10-15 00:29:22.476: WARN/System.err(562): at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:198)\n10-15 00:29:22.476: WARN/System.err(562): at android.app.Activity.setContentView(Activity.java:1647)\n10-15 00:29:22.476: WARN/System.err(562): at com.sampleapp.MainActivity.onCreate(MainActivity.java:362)\n10-15 00:29:22.486: WARN/System.err(562): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1047)\n10-15 00:29:22.486: WARN/System.err(562): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2627)\n10-15 00:29:22.486: WARN/System.err(562): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2679)\n10-15 00:29:22.496: WARN/System.err(562): at android.app.ActivityThread.access$2300(ActivityThread.java:125)\n10-15 00:29:22.496: WARN/System.err(562): at 
android.app.ActivityThread$H.handleMessage(ActivityThread.java:2033)\n10-15 00:29:22.496: WARN/System.err(562): at android.os.Handler.dispatchMessage(Handler.java:99)\n10-15 00:29:22.506: WARN/System.err(562): at android.os.Looper.loop(Looper.java:123)\n10-15 00:29:22.506: WARN/System.err(562): at android.app.ActivityThread.main(ActivityThread.java:4627)\n10-15 00:29:22.506: WARN/System.err(562): at java.lang.reflect.Method.invokeNative(Native Method)\n10-15 00:29:22.506: WARN/System.err(562): at java.lang.reflect.Method.invoke(Method.java:521)\n10-15 00:29:22.506: WARN/System.err(562): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:868)\n10-15 00:29:22.506: WARN/System.err(562): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:626)\n10-15 00:29:22.506: WARN/System.err(562): at dalvik.system.NativeStart.main(Native Method)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBasically I'm trying to use json, and the problem I'm having is with this line\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eHttpGet httpGet = new HttpGet(\n \"http://www.hotels-in-london-hotels.com/mytrolly/service.php?request={\\\"mode\\\":\\\"category\\\"}\");\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI've encoded the string, and I still get the exception \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003etry {\n url = \"http://www.hotels-in-london-hotels.com/mytrolly/service.php?request={\\\"mode\\\":\\\"category\\\"}\";\n String encodedurl = URLEncoder.encode(url,\"UTF-8\");\n Log.d(\"TEST\", encodedurl);\n } catch (UnsupportedEncodingException e) {\n e.printStackTrace();\n } \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAny solutions?\u003c/p\u003e","accepted_answer_id":"7774876","answer_count":"3","comment_count":"0","creation_date":"2011-10-15 00:10:21.81 UTC","last_activity_date":"2014-04-21 10:21:07.92 UTC","last_edit_date":"2011-10-15 00:38:03.373 
UTC","last_editor_display_name":"","last_editor_user_id":"932381","owner_display_name":"","owner_user_id":"932381","post_type_id":"1","score":"0","tags":"android|json","view_count":"2110"} {"id":"37062319","title":"Love2d move an object on screen","body":"\u003cp\u003eI am attempting to use keyboard input to translate a label around the screen. Currently only down and left are functioning. My code is below. \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edebug = true\ndown = 0\nup = 0\nleft = 0\nright = 0\ntext = 'non'\n\nx = 100\ny = 100\n\ndx = 0\ndy = 0\nfunction love.load(arg)\n\nend\n\nfunction love.update(dt)\n if love.keyboard.isDown('escape') then\n love.event.push('quit')\n end\n\n if up == 1 then\n dy = -1\n end\n if up == 0 then\n dy = 0\n end\n\n if down == 1 then\n dy = 1\n end\n if down == 0 then\n dy = 0\n end\n\n if right == 1 then\n dx = 1\n end\n if right == 0 then\n dx = 0\n end\n\n if left == 1 then\n dx = -1\n end\n if left == 0 then\n dx = 0\n end\nend\n\nfunction love.keypressed(key)\n if key == 'up' or key == 'w' then\n text = 'up'\n up = 1\n end\n if key == 'down' or key == 's' then\n text = 'down'\n down = 1\n end\n if key == 'right' or key == 'd' then\n text = 'right'\n right = 1\n end\n if key == 'left' or key == 'a' then\n text = 'left'\n left = 1\n end\nend\n\nfunction love.keyreleased(key)\n text = 'non'\n\n if key == 'up' or key == 'w' then\n up = 0\n end\n if key == 'down' or key == 's' then\n down = 0\n end\n if key == 'right' or key == 'd' then\n right = 0\n end\n if key == 'left' or key == 'a' then\n left = 0\n end\nend\n\nfunction love.draw(dt)\n x = x + dx\n y = y + dy\n love.graphics.print(text, x, y)\nend\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eExperimentation has shown that the order of if statements in the love.update(dt) section effects which directions work but I cannot get all four to work at the same time.\u003c/p\u003e","answer_count":"1","comment_count":"1","creation_date":"2016-05-06 00:21:26.117 
UTC","last_activity_date":"2016-05-06 05:28:41.39 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"3668936","post_type_id":"1","score":"2","tags":"lua|love2d","view_count":"331"} {"id":"11398320","title":"How to group a List\u003cT\u003e that has a property of List\u003cstring\u003e by that nested List?","body":"\u003cp\u003eI have a custom data type that contains a \u003ccode\u003eList\u0026lt;string\u0026gt;\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eI wish to group a \u003ccode\u003eList\u003c/code\u003e of \u003ccode\u003eCustomDataType\u003c/code\u003e by that nested \u003ccode\u003eList\u0026lt;string\u0026gt;\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eI have tried the following\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e compoundSchedules.GroupBy(a =\u0026gt; a.Timepoints);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhere \u003ccode\u003eTimepoints\u003c/code\u003e is a list of dates represented as strings. Where any \u003ccode\u003eCustomDataType\u003c/code\u003es have identical timepoints, I wish them to be grouped together.\nUsing the code above, it does not group them and instead just repeats the \u003ccode\u003eList\u003c/code\u003e of \u003ccode\u003eCustomDataType\u003c/code\u003e with its timepoint list as the \u003ccode\u003eIGrouping\u003c/code\u003e \u003ccode\u003eKey\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eThanks.\u003c/p\u003e","accepted_answer_id":"11398687","answer_count":"4","comment_count":"3","creation_date":"2012-07-09 15:32:45.677 UTC","last_activity_date":"2012-07-09 15:51:41.217 UTC","last_edit_date":"2012-07-09 15:42:14.74 UTC","last_editor_display_name":"","last_editor_user_id":"71059","owner_display_name":"","owner_user_id":"482138","post_type_id":"1","score":"4","tags":"c#|linq|group-by","view_count":"130"} -{"id":"42158672","title":"Wrong type error on an interface intended to test a method using Docker's client API","body":"\u003cp\u003eI'm refactoring a program I wrote so 
I can properly write tests for it. One of the first methods I'd like to test is a method that uses \u003ca href=\"https://github.com/docker/docker/tree/master/client\" rel=\"nofollow noreferrer\"\u003eDocker's client API\u003c/a\u003e to see if a certain image exists on a Docker host.\u003c/p\u003e\n\n\u003cp\u003eTo be able to test this method, I created an interface that matches \u003ccode\u003eclient.ImageList\u003c/code\u003e's \u003ca href=\"https://godoc.org/github.com/docker/docker/client#Client.ImageList\" rel=\"nofollow noreferrer\"\u003esignature\u003c/a\u003e:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003etype ImageLister interface {\n ImageList(ctx context.Context, options types.ImageListOptions) ([]types.ImageSummary, error)\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI also changed the method to test to take an \u003ccode\u003eImageLister\u003c/code\u003e as argument, so I can pass in an \u003ccode\u003eImageLister\u003c/code\u003e implementation specific to my tests.\u003c/p\u003e\n\n\u003cp\u003eHowever, in my actual code, where I pass in the \"real\" Docker client to the method to test, the following compilation error occurs:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eImageExists:\n *client.Client does not implement ImageLister (wrong type for ImageList method)\n have ImageList(\"github.com/docker/docker/vendor/golang.org/x/net/context\".Context, types.ImageListOptions) ([]types.ImageSummary, error)\n want ImageList(\"context\".Context, types.ImageListOptions) ([]types.ImageSummary, error)\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eHow can I resolve this? 
Or is my approach bad anyway, and should I go a different route?\u003c/p\u003e\n\n\u003cp\u003eedit:\nThe following program reproduces the issue I'm encountering.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epackage main\n\nimport (\n \"context\"\n \"github.com/docker/docker/api/types\"\n \"github.com/docker/docker/client\"\n)\n\ntype ImageLister interface {\n ImageList(ctx context.Context, options types.ImageListOptions) ([]types.ImageSummary, error)\n}\n\nfunc main() {\n client, err := client.NewEnvClient()\n defer client.Close()\n\n ImageExists(context.TODO(), client, \"foo\")\n}\n\nfunc ImageExists(ctx context.Context, lister ImageLister, image string) (bool, error) {\n return true, nil\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"42165701","answer_count":"2","comment_count":"2","creation_date":"2017-02-10 11:48:44.95 UTC","last_activity_date":"2017-02-10 17:51:31.75 UTC","last_edit_date":"2017-02-10 13:12:54.46 UTC","last_editor_display_name":"","last_editor_user_id":"2160748","owner_display_name":"","owner_user_id":"2160748","post_type_id":"1","score":"1","tags":"testing|go|docker","view_count":"113"} +{"id":"42158672","title":"Wrong type error on an interface intended to test a method using Docker's client API","body":"\u003cp\u003eI'm refactoring a program I wrote so I can properly write tests for it. 
One of the first methods I'd like to test is a method that uses \u003ca href=\"https://github.com/docker/docker/tree/main/client\" rel=\"nofollow noreferrer\"\u003eDocker's client API\u003c/a\u003e to see if a certain image exists on a Docker host.\u003c/p\u003e\n\n\u003cp\u003eTo be able to test this method, I created an interface that matches \u003ccode\u003eclient.ImageList\u003c/code\u003e's \u003ca href=\"https://godoc.org/github.com/docker/docker/client#Client.ImageList\" rel=\"nofollow noreferrer\"\u003esignature\u003c/a\u003e:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003etype ImageLister interface {\n ImageList(ctx context.Context, options types.ImageListOptions) ([]types.ImageSummary, error)\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI also changed the method to test to take an \u003ccode\u003eImageLister\u003c/code\u003e as argument, so I can pass in an \u003ccode\u003eImageLister\u003c/code\u003e implementation specific to my tests.\u003c/p\u003e\n\n\u003cp\u003eHowever, in my actual code, where I pass in the \"real\" Docker client to the method to test, the following compilation error occurs:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eImageExists:\n *client.Client does not implement ImageLister (wrong type for ImageList method)\n have ImageList(\"github.com/docker/docker/vendor/golang.org/x/net/context\".Context, types.ImageListOptions) ([]types.ImageSummary, error)\n want ImageList(\"context\".Context, types.ImageListOptions) ([]types.ImageSummary, error)\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eHow can I resolve this? 
Or is my approach bad anyway, and should I go a different route?\u003c/p\u003e\n\n\u003cp\u003eedit:\nThe following program reproduces the issue I'm encountering.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003epackage main\n\nimport (\n \"context\"\n \"github.com/docker/docker/api/types\"\n \"github.com/docker/docker/client\"\n)\n\ntype ImageLister interface {\n ImageList(ctx context.Context, options types.ImageListOptions) ([]types.ImageSummary, error)\n}\n\nfunc main() {\n client, err := client.NewEnvClient()\n defer client.Close()\n\n ImageExists(context.TODO(), client, \"foo\")\n}\n\nfunc ImageExists(ctx context.Context, lister ImageLister, image string) (bool, error) {\n return true, nil\n}\n\u003c/code\u003e\u003c/pre\u003e","accepted_answer_id":"42165701","answer_count":"2","comment_count":"2","creation_date":"2017-02-10 11:48:44.95 UTC","last_activity_date":"2017-02-10 17:51:31.75 UTC","last_edit_date":"2017-02-10 13:12:54.46 UTC","last_editor_display_name":"","last_editor_user_id":"2160748","owner_display_name":"","owner_user_id":"2160748","post_type_id":"1","score":"1","tags":"testing|go|docker","view_count":"113"} {"id":"30508313","title":"python subprocess sub thread does not exit?","body":"\u003cp\u003ethe following code is strange:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003edef exec_shell(args, pipe = True):\n pre_fun = None\n if pipe == True:\n pre_fun = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL)\n\n process = subprocess.Popen(args, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n preexec_fn = pre_fun)\n (out, err) = process.communicate() \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003ewhen i execute a complicate shell script,\nif i set the pipe is true:\n the err will be :\n A thread exited while 2 threads were running\nif i set pipe false the err will be : broken pipe\u003c/p\u003e\n\n\u003cp\u003ewho can help me ? 
thanks\u003c/p\u003e","answer_count":"1","comment_count":"3","creation_date":"2015-05-28 13:41:52.96 UTC","last_activity_date":"2015-05-28 13:58:39.69 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1503918","post_type_id":"1","score":"0","tags":"python","view_count":"102"} {"id":"34653034","title":"Repeating an entry on the result of a get request in angular","body":"\u003cp\u003eI am trying to populate a list in angular based on the returned result of a get request to my back end.\u003c/p\u003e\n\n\u003cp\u003eI have the following implementation but it is not populating. The call is being made to the back end, the view is however not being populated\u003c/p\u003e\n\n\u003cp\u003eView:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e \u0026lt;div ng-controller=\"MyController as controller\"\u0026gt;\n \u0026lt;ul\u0026gt;\n \u0026lt;li ng-repeat=\"page in myPages\"\u0026gt;\n Test\n \u0026lt;/li\u0026gt;\n \u0026lt;/ul\u0026gt;\n \u0026lt;/div\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMyController:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e(function() {\n 'use strict';\n\n function MyController($scope, $stateParams, MyService) {\n $scope.init = function() {\n $scope.myService = MyService;\n $scope.myPages = $scope.getPages();\n }\n $scope.getPages = function() {\n return $scope.myService.getPages(null);\n }\n\n $scope.init();\n }\n MyController.$inject = ['$scope', '$stateParams', '$MyService'];\n angular.module('MyModule').controller('MyController', MyController);\n})();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eMyService:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e(function() {\n 'use strict';\n\n function $MyService($http, API_CONFIG) {\n function getPages(active) {\n return $http.get('my/url', {\n params: {\n activeOnly : active\n }\n });\n }\n return { getPages : getPages } \n }\n $MyService.$inject = ['$http', 'API_CONFIG'];\n angular.module('myServiceModule', []);\n 
angular.module('myServiceModule').factory('MyService', $MyService);\n}\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"0","comment_count":"0","creation_date":"2016-01-07 10:39:33.95 UTC","last_activity_date":"2016-01-07 10:39:33.95 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1860517","post_type_id":"1","score":"1","tags":"angularjs","view_count":"17"} {"id":"17769385","title":"How to adjust table size to correspond to the media screen size?","body":"\u003cp\u003eI'm having difficulties adjusting my tables to fit media screen sizes. The layout as it is now adjusts its content via media queries. In effect, there is no need for a scroll bar. \u003c/p\u003e\n\n\u003cp\u003eHere's some code...\u003c/p\u003e\n\n\u003cp\u003e(1) The HTML:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;div id=\"results\"\u0026gt;\n\u0026lt;/div\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eYa, it's really simple. This tag actually resides in a few other divs that hold content. This particular div holds content that is sent from the server when a request is made. 
It responds properly when plain text is the content, but tables cause an issue.\u003c/p\u003e\n\n\u003cp\u003eHere's my CSS code...\u003c/p\u003e\n\n\u003cp\u003e(1) The CSS for \u003ccode\u003e#results\u003c/code\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ediv#results {\n width: 90%;\n margin: 0 auto;\n margin-top: 30px;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e(2) The CSS for tables:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003etable {\n font-family: 'PT Sans', Arial, sans-serif; \n color: #666;\n font-size: 12px;\n text-shadow: 1px 1px 0px #fff;\n background: #eaebec;\n margin: 20px;\n border: #ccc 1px solid;\n width: 80%;\n margin: 0 auto;\n}\n\ntable th {\n padding: 21px 25px 22px 25px;\n border-top: 1px solid #fafafa;\n border-bottom: 1px solid #e0e0e0;\n background: #ededed;\n}\n\ntable th:first-child {\n text-align: left;\n padding-left:20px;\n}\n\ntable tr {\n text-align: center;\n padding-left: 20px;\n}\n\ntable td:first-child {\n text-align: left;\n padding-left: 20px;\n border-left: 0px;\n}\n\ntable td {\n padding:8px;\n border-top: 1px solid #ffffff;\n border-bottom:1px solid #e0e0e0;\n border-left: 1px solid #e0e0e0;\n background: #fafafa;\n}\n\ntable tr:hover td {\n background: #f2f2f2;\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe table remains the same size regardless of media screen size. The CSS probably makes that obvious. However, what can be done in this case? If I \"squish\" the table, the content will be illegible. For instance, if I want to pull up a table on a smartphone, how can I modify the table so that it is usable? Right now I have 5 columns. Is there a way to possible start a new row, say after two or three columns, in order that the content doesn't have to be compressed?\u003c/p\u003e\n\n\u003cp\u003eThat's the basic idea; I hope that the crux of what I am saying is clear. 
Any input is appreciated.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2013-07-21 05:28:09.603 UTC","last_activity_date":"2013-07-21 07:47:21.507 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1599549","post_type_id":"1","score":"0","tags":"html|css|table","view_count":"1064"} @@ -4528,7 +4528,7 @@ {"id":"3769171","title":"CSS Holy Grail layout","body":"\u003cp\u003eCan someone break down for me the pieces that compose the Holy Grail Layout with switched div positioning as seen here? \u003ca href=\"http://matthewjamestaylor.com/blog/perfect-3-column.htm\" rel=\"nofollow noreferrer\"\u003ehttp://matthewjamestaylor.com/blog/perfect-3-column.htm\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eThe way I understand is that:\u003c/p\u003e\n\n\u003cul\u003e\n\u003cli\u003e\u003cp\u003ecolmask is just a wrapper to position the content between header and footer\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ecolmid is another wrapper that i guess accomodates some browsers such as IE7\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ecolleft is the wrapper of the real thing (not sure why so many wrappers)\u003c/p\u003e\u003c/li\u003e\n\u003cli\u003e\u003cp\u003ecol1, col2 and col3 are the real thing and are all floating left and set their margins to adjust their appearance on the screen\u003c/p\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\n\u003cp\u003eCan someone explain better what's the Zen of that design? 
I'm trying to apply it to a real world scenario and it's not really working.\u003c/p\u003e","answer_count":"2","comment_count":"2","creation_date":"2010-09-22 12:26:45.513 UTC","last_activity_date":"2013-06-03 17:30:03.4 UTC","last_edit_date":"2011-10-23 21:22:56.213 UTC","last_editor_display_name":"","last_editor_user_id":"963791","owner_display_name":"","owner_user_id":"192337","post_type_id":"1","score":"1","tags":"css","view_count":"2459"} {"id":"20803032","title":"Can not access a member of class \"com.ABC$XYZ\" with modifiers \"synchronized\"","body":"\u003cp\u003eI'm trying to retrieve synchronized method using reflection API. \u003c/p\u003e\n\n\u003cp\u003eSample code snippet given below:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eclass ABC {\n class XYZ {\n synchronized List methodOfXYZ() {\n System.out.println(\"Im in Method\");\n // do sum stuff\n return \u0026lt;Obj-List\u0026gt;;\n }\n }\n}\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI'm getting the runtime exception like:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003ejava.lang.IllegalAccessException: Class \"com.TestReflection\" can not access a member of class \"com.ABC$XYZ\" with modifiers \"synchronized\".\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"1","comment_count":"2","creation_date":"2013-12-27 15:16:52.4 UTC","last_activity_date":"2013-12-27 15:36:33.677 UTC","last_edit_date":"2013-12-27 15:26:58.833 UTC","last_editor_display_name":"","last_editor_user_id":"1393766","owner_display_name":"","owner_user_id":"3134105","post_type_id":"1","score":"-1","tags":"java|reflection|inner-classes|synchronized|illegalaccessexception","view_count":"1862"} {"id":"69695","title":"stringstream manipulators \u0026 vstudio 2003","body":"\u003cp\u003eI am trying to use a stringstream object in VC++ (VStudio 2003) butI am getting an error when I use the overloaded \u0026lt;\u0026lt; operator to try and set some manipulators. 
\u003c/p\u003e\n\n\u003cp\u003eI am trying the following: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eint SomeInt = 1; \nstringstream StrStream; \nStrStream \u0026lt;\u0026lt; std::setw(2) \u0026lt;\u0026lt; SomeInt; \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThis will not compile (error C2593: 'operator \u0026lt;\u0026lt;' is ambiguous).\u003cbr\u003e\nDoes VStudio 2003 support using manipulators in this way?\u003cbr\u003e\nI know that I can just set the width directly on the stringstream object e.g. StrStream.width(2);\u003cbr\u003e\nI was wondering why the more usual method doesn't work?\u003c/p\u003e","accepted_answer_id":"69721","answer_count":"3","comment_count":"0","creation_date":"2008-09-16 05:59:39.3 UTC","last_activity_date":"2013-12-02 13:38:38.773 UTC","last_edit_date":"2013-12-02 13:38:38.773 UTC","last_editor_display_name":"","last_editor_user_id":"2432317","owner_display_name":"Anthony K","owner_user_id":"1682","post_type_id":"1","score":"0","tags":"visual-studio|stl","view_count":"480"} -{"id":"45371157","title":"gRPC in Java - Blocking/nonblocking stubs","body":"\u003cp\u003eI am attempting to create a java grpc client to communicate with a server in go. I am new to grpc so am following this tutorial \u003ca href=\"https://grpc.io/docs/tutorials/basic/java.html\" rel=\"nofollow noreferrer\"\u003egRPC Java Tutorial\u003c/a\u003e. In these examples they refer to blocking and nonblocking stubs which they appear to import from elsewhere in their \u003ca href=\"https://github.com/grpc/grpc-java/tree/master/examples/src/main/java/io/grpc/examples/routeguide\" rel=\"nofollow noreferrer\"\u003egithub\u003c/a\u003e.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport io.grpc.examples.routeguide.RouteGuideGrpc.RouteGuideBlockingStub;\nimport io.grpc.examples.routeguide.RouteGuideGrpc.RouteGuideStub;\n...\n... 
\nblockingStub = RouteGuideGrpc.newBlockingStub(channel);\nasyncStub = RouteGuideGrpc.newStub(channel);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever I cannot find these classes in their repo. I am still hazy on exactly what they are for, should they have been produced when compiling the .proto file? Any help/pointers would be appreciated. Thanks.\u003c/p\u003e","accepted_answer_id":"45380216","answer_count":"2","comment_count":"0","creation_date":"2017-07-28 10:30:55.91 UTC","last_activity_date":"2017-07-28 19:45:00.887 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"7819897","post_type_id":"1","score":"0","tags":"java|grpc|grpc-java","view_count":"376"} +{"id":"45371157","title":"gRPC in Java - Blocking/nonblocking stubs","body":"\u003cp\u003eI am attempting to create a java grpc client to communicate with a server in go. I am new to grpc so am following this tutorial \u003ca href=\"https://grpc.io/docs/tutorials/basic/java.html\" rel=\"nofollow noreferrer\"\u003egRPC Java Tutorial\u003c/a\u003e. In these examples they refer to blocking and nonblocking stubs which they appear to import from elsewhere in their \u003ca href=\"https://github.com/grpc/grpc-java/tree/main/examples/src/main/java/io/grpc/examples/routeguide\" rel=\"nofollow noreferrer\"\u003egithub\u003c/a\u003e.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eimport io.grpc.examples.routeguide.RouteGuideGrpc.RouteGuideBlockingStub;\nimport io.grpc.examples.routeguide.RouteGuideGrpc.RouteGuideStub;\n...\n... \nblockingStub = RouteGuideGrpc.newBlockingStub(channel);\nasyncStub = RouteGuideGrpc.newStub(channel);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever I cannot find these classes in their repo. I am still hazy on exactly what they are for, should they have been produced when compiling the .proto file? Any help/pointers would be appreciated. 
Thanks.\u003c/p\u003e","accepted_answer_id":"45380216","answer_count":"2","comment_count":"0","creation_date":"2017-07-28 10:30:55.91 UTC","last_activity_date":"2017-07-28 19:45:00.887 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"7819897","post_type_id":"1","score":"0","tags":"java|grpc|grpc-java","view_count":"376"} {"id":"28805823","title":"Control Timestamp precision of random timestamps","body":"\u003cp\u003eI've a code for generating random date strings. However I came to find that when I generate a random timestamp, it contains some precision decimal points to seconds field. How ever my SimpleDateFormat does not contain such precision values, does anyone know what is wrong here, and how can I remove or control the precision values ?\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eCode\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003elong rangeBegin = Timestamp.valueOf(\"2015-01-01 00:00:00\").getTime();\n SimpleDateFormat simpleDateFormat = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\");\n Date currentDate = new Date();\n String currentDateString = simpleDateFormat.format(currentDate);\n long rangeEnd = Timestamp.valueOf(currentDateString).getTime();\n long diff = rangeEnd - rangeBegin + 1;\n Timestamp randomTimestamp = new Timestamp(rangeBegin + (long)(Math.random() * diff));\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003e\u003cstrong\u003eSample output\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003erandomTimestamp = 2015-02-20 02:36:00.\u003cstrong\u003e646\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eThanks\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eEdit\u003c/strong\u003e :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eString randomTimestampString = String.valueOf(randomTimestamp).split(\"\\\\.\")[0];\n\u003c/code\u003e\u003c/pre\u003e","answer_count":"2","comment_count":"2","creation_date":"2015-03-02 08:57:56.263 UTC","last_activity_date":"2015-12-11 14:07:29.993 
UTC","last_edit_date":"2015-12-11 14:07:29.993 UTC","last_editor_display_name":"","last_editor_user_id":"2604735","owner_display_name":"","owner_user_id":"2604735","post_type_id":"1","score":"-2","tags":"java|random|timestamp|simpledateformat|precision","view_count":"107"} {"id":"36729973","title":"When is it necessary to use the Object.assign() method to copy an instance of an object?","body":"\u003cp\u003e\u003cstrong\u003eThe following is an example scenario I made up for my own practice of this problem. If you want to skip straight to the technical details, please see 'Technical Details' below.\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eI have a personal project I've been working on to learn JavaScript. Basically, the user can design a shoe by picking available options.\u003c/p\u003e\n\n\u003cp\u003eThe trick is that the left and right shoe must have the same size, among other properties, but things like color, shoe lace texture, etc. can be independent properties per shoe. (I figured this was a decent way for me to practice object manipulation and inheritance).\u003c/p\u003e\n\n\u003cp\u003eThe user starts off with designing the right shoe; when the \"swap\" button is clicked to look at the left shoe, the user currently sees a copy of the right shoe (but inverted). \u003cstrong\u003eOnly on the first swapping of shoes is the left shoe generated and made an exact copy of the right shoe. 
From then onwards, unique options per shoe orientation are preserved.\u003c/strong\u003e Then, if the user makes specific changes to \u003cem\u003ethat\u003c/em\u003e left-shoe model, and then switches to the right shoe, the user is supposed to see the exact same right shoe that they had originally designed before they clicked the \"swap\" button.\u003c/p\u003e\n\n\u003cp\u003eSo if their right shoe had red laces, they switch to the left shoe view and make the left shoe have a blue lace, then when switching back to the right shoe the user should see red laces!\u003c/p\u003e\n\n\u003chr\u003e\n\n\u003cp\u003e\u003cstrong\u003eTechnical Details\u003c/strong\u003e \u003c/p\u003e\n\n\u003cp\u003eWhen I was writing the code for my main project I was running into trouble with the unique options being perserved. For example, if I made the laces green for the left shoe, the right shoe would always have green laces. Troubleshooting down to the problem was easy because the only time the right shoe was losing it's unique options, such as a red lace, was when I would set the lace color for the left shoe.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003econsole.log(\"THE RIGHT LACE BEFORE: \" + rightShoe.laceId);\nleftShoe.laceId = 'green';\nconsole.log(\"THE RIGHT LACE AFTER: \" + rightShoe.laceId);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eWhat would log to the console was:\u003c/p\u003e\n\n\u003cblockquote\u003e\n \u003cp\u003eTHE RIGHT LACE BEFORE: red\u003c/p\u003e\n \n \u003cp\u003eTHE RIGHT LACE AFTER: green\u003c/p\u003e\n\u003c/blockquote\u003e\n\n\u003cp\u003eEven though I wasn't changing the \u003ccode\u003erightShoe\u003c/code\u003e, it was being changed whenever I changed the \u003ccode\u003eleftShoe\u003c/code\u003e property.\u003c/p\u003e\n\n\u003cp\u003eSo I went back to where I first define the \u003ccode\u003eleftShoe\u003c/code\u003e object, which is when the user clicks \"swap\" for the first time in the life of the script (My amateur thought was 
\u003cem\u003ewhy propagate and fill the \u003ccode\u003eleftShoe\u003c/code\u003e object if the user possibly never customizes the left shoe?\u003c/em\u003e Maybe it's being unnecessarily stingy with data, I don't know). From then onward, \u003cem\u003eI never redefined the \u003ccode\u003eleftShoe\u003c/code\u003e to be a copy of \u003ccode\u003erightShoe\u003c/code\u003e\u003c/em\u003e or vice versa. I figured that I was getting hung up by the fact that I was probably doing object referencing and, just like with other languages, I was changing the \u003cem\u003ereference\u003c/em\u003e and not the value.\u003c/p\u003e\n\n\u003cp\u003eBefore coming to SO with my troubles, I wanted to make a JSFiddle to recreate the problem. Being that my project is lengthy (around ~1500 lines, including some \u003cstrong\u003eTHREE.js\u003c/strong\u003e for graphics), I did my best to emulate the process. And so \u003ca href=\"https://jsfiddle.net/fuayzgxn/2/\"\u003ehere it is\u003c/a\u003e.\u003c/p\u003e\n\n\u003cp\u003e\u003cem\u003eExcept the JSFiddle worked exactly as I expected it to!\u003c/em\u003e The left model preserved it's unique attribute and data set to that attribute. So, I did a little more digging and read about the \u003cstrong\u003eObject.assign()\u003c/strong\u003e method. So in my original project code (\u003cem\u003enot\u003c/em\u003e the fiddle), I changed this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eleftShoe = rightShoe;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eto this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eleftShoe = Object.assign({}, rightShoe);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAs excited as I am to have finally gotten this to work, I am equally amused and perplexed because I don't understand why my JSFiddle didn't need the \u003ccode\u003eassign()\u003c/code\u003e method but my identical project code did. 
Thank you.\u003c/p\u003e","accepted_answer_id":"36730060","answer_count":"1","comment_count":"14","creation_date":"2016-04-19 21:39:01.063 UTC","last_activity_date":"2016-04-19 22:02:46.1 UTC","last_edit_date":"2016-04-19 22:02:46.1 UTC","last_editor_display_name":"","last_editor_user_id":"5137782","owner_display_name":"","owner_user_id":"5137782","post_type_id":"1","score":"10","tags":"javascript|jquery|object|constructor","view_count":"257"} {"id":"33172645","title":"Looping over an AngularJS grid","body":"\u003cp\u003eI'm trying to loop over an AngularJS grid looking for a row that has an ID I want to delete. I know I need to use the foreach statement, but am unfamiliar with it even after looking at the documentation. An example using the below criteria would be more useful.\u003c/p\u003e\n\n\u003cp\u003eHow would I accomplish this?\u003c/p\u003e\n\n\u003cp\u003eI have the following function that has the grid and row objects.\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eviewModel.viewRow = function (grid, row)\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eI'm trying to compare the row within the grid that has the same ID value as the following: \u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003erow.entity.CheckDepositHeaderId\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAfter I find it, I simply want to remove the row from the grid collection.\u003c/p\u003e\n\n\u003cp\u003eThanks in advance...\u003c/p\u003e","accepted_answer_id":"35007361","answer_count":"1","comment_count":"0","creation_date":"2015-10-16 14:13:42.09 UTC","last_activity_date":"2016-01-26 05:07:13.873 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"856488","post_type_id":"1","score":"0","tags":"angularjs","view_count":"41"} @@ -4847,7 +4847,7 @@ {"id":"15484635","title":"Sort a collection of randomly generated numbers","body":"\u003cp\u003eI am creating a random number generator in c#\u003c/p\u003e\n\n\u003cp\u003eI generate the numbers as 
so\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eRandom RandomClass = new Random();\n\nNum1.text = RandomClass.Next(1,49).ToString();\nNum2.text = RandomClass.Next(1,49).ToString();\nNum3.text = RandomClass.Next(1,49).ToString();\nNum4.text = RandomClass.Next(1,49).ToString();\nNum5.text = RandomClass.Next(1,49).ToString();\nNum6.text = RandomClass.Next(1,49).ToString();\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe user clicks a button and the numbers are generated, what I want is for there to be a button which can sort the numbers, so for example smallest to lowest.\u003c/p\u003e\n\n\u003cp\u003eCould I turn the numbers generated into an array and call .ToArray and then sort from there? I am unsure how to group the random numbers together to then call a sorting method on them.\u003c/p\u003e","accepted_answer_id":"15484746","answer_count":"5","comment_count":"3","creation_date":"2013-03-18 18:54:10.973 UTC","last_activity_date":"2013-03-18 19:45:24.883 UTC","last_edit_date":"2013-03-18 18:56:48.27 UTC","last_editor_display_name":"","last_editor_user_id":"76337","owner_display_name":"","owner_user_id":"1313306","post_type_id":"1","score":"0","tags":"c#|random","view_count":"1249"} {"id":"20430276","title":"Android 4.1 stock browser: Can't open links in new tab","body":"\u003cp\u003eI am having an issue opening links in a new tab, specifically a link to google maps with location data. It opens in a new tab fine on iOS 6/7, and Chrome Browsers.\u003c/p\u003e\n\n\u003cp\u003eHere is the tags I am using:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e\u0026lt;a class=\"location\" href=\"https://maps.google.com/maps?q={{ eventDetails.location.latitude }},{{ eventDetails.location.longitude }}\" target=\"_blank\"\u0026gt;View on map\u0026lt;/a\u0026gt;\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eThe templates are from AngularJS. 
I thought \u003ccode\u003e_blank\u003c/code\u003e would be enough to get this browser to pop into a new tab, however the page is loading in the same window.\u003c/p\u003e\n\n\u003cp\u003eThe device is on a Galaxy S3 with Android 4.1.1 installed. The user agent string says \u003ccode\u003eAppleWebKit/534.30 Version/4.0 Mobile Safari/534.30\u003c/code\u003e\u003c/p\u003e","answer_count":"1","comment_count":"2","creation_date":"2013-12-06 17:46:08.857 UTC","last_activity_date":"2014-11-22 04:41:22.837 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"1687443","post_type_id":"1","score":"2","tags":"android|html|angularjs|android-browser","view_count":"496"} {"id":"42111404","title":"removeEventlistener not working as expected with arrow function and parameter","body":"\u003cp\u003eI've got a page which can hold multiple editable contents. I want to fire some kind of check event whenever the content is edited.\u003c/p\u003e\n\n\u003cp\u003eMy code to achieve this looks like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// Find all editable elements.\nlet allEditableElements = document.querySelectorAll('[contenteditable=\"true\"]');\n\nfor(let i = 0; i \u0026lt; allEditableElements.length; i++){\n\n //Remove eventListener to prevent duplicate events.\n allEditableElements[i].removeEventListener('input', (event) =\u0026gt; {myClass.myEventMethod(event);}, false);\n\n // Add event.\n allEditableElements[i].addEventListener('input', (event) =\u0026gt; {myClass.myEventMethod(event);}, false);\n} \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eEverything works fine so far. But as I said users can edit the content, which includes adding new editable contents to the page itself. 
At that point the events will be set again, which is why I'm trying to remove the \u003ccode\u003eevent\u003c/code\u003e beforehand.\u003c/p\u003e\n\n\u003cp\u003e\u003cstrong\u003eMy question is why would the \u003ccode\u003eremoveEventListener\u003c/code\u003e function not work as expected? And isn't there a way to name given events like so:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// With eventNameGivenByUser an event could be removed just by its name.\naddEventListener('eventTriggerName', 'eventNameGivenByUser', function(), [, options]);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003chr\u003e\n\n\u003cp\u003eOf course I did some research and found out that the code itself would work like this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// Find all editable elements.\n\nlet allEditableElements = document.querySelectorAll('[contenteditable=\"true\"]');\n\nfor(let i = 0; i \u0026lt; allEditableElements.length; i++){\n\n //Remove eventListener to prevent duplicate events.\n allEditableElements[i].removeEventListener('input', myClass.myEventMethod, false);\n\n // Add event.\n allEditableElements[i].addEventListener('input', myClass.myEventMethod, false);\n} \n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eHowever this is without passing parameters, which is mandatory in such a dynamic setup...\u003c/p\u003e\n\n\u003cp\u003eHope someone will tell me that in 2017 there is a nice and decent way without using libraries.\u003c/p\u003e\n\n\u003chr\u003e\n\n\u003cp\u003e\u003cstrong\u003eEdit 08.02.2017:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eJust for the curious ones:\nThe Solution is to not pass any parameter to the listener:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// as you can see there is no (event) and no arrow function either.\naddEventListener('input', myClass.myEventMethod, false);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAll there is to do now is to call prepare the method like 
this:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003e// The Parameter will be passed through anyway!\nmyEventMethod(event) {\n\n /**\n * Do stuff with event parameter.\n */\n\n};\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eAfter that the listener can be removed like so:\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003eremoveEventListener('input', myClass.myEventMethod, false);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003chr\u003e\n\n\u003cp\u003e\u003cstrong\u003eSidenote:\u003c/strong\u003e\u003c/p\u003e\n\n\u003cp\u003eI'm using electron and do not need cross browser support. It just has to be compatible with \u003ccode\u003eChromium: 56.0.2924.87\u003c/code\u003e.\u003c/p\u003e\n\n\u003cp\u003eRegards, Megajin\u003c/p\u003e","accepted_answer_id":"42111869","answer_count":"2","comment_count":"2","creation_date":"2017-02-08 11:05:44.987 UTC","last_activity_date":"2017-02-08 12:17:12.647 UTC","last_edit_date":"2017-02-08 12:17:12.647 UTC","last_editor_display_name":"","last_editor_user_id":"4457744","owner_display_name":"","owner_user_id":"4457744","post_type_id":"1","score":"2","tags":"javascript|ecmascript-6","view_count":"281"} -{"id":"33379589","title":"map showing gray on an Android Wear Emulator","body":"\u003cp\u003ei am using Android Studio, i created a wear AVD with API 22, Activated debugging on the device an obtained the google map API key, i used the example provided by google for a simple Wear application using Google Maps API :\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://developers.google.com/maps/documentation/android-api/wear\" rel=\"nofollow\"\u003ehttps://developers.google.com/maps/documentation/android-api/wear\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eproject's files are provided here :\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://github.com/googlemaps/android-samples/tree/master/AndroidWearMap\" 
rel=\"nofollow\"\u003ehttps://github.com/googlemaps/android-samples/tree/master/AndroidWearMap\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003ethe application start but show a gray map without changes.\u003c/p\u003e\n\n\u003cp\u003ei think it is a problem of connectivity since the wear AVD is not connected to the internet like a Handled AVD so it can't download the map files.\u003c/p\u003e\n\n\u003cp\u003ei didn't try to connect the wear AVD with my Android Phone because the Android version it is using is not compatible with the Android Wear application required to connect a wear to Handled devices and and it doesn\"t support higher versions.\u003c/p\u003e\n\n\u003cp\u003eis there a solution to let the wear AVD use my computer internet, or another way to use Google Maps on wear AVD ?\u003c/p\u003e\n\n\u003cp\u003ecan i show the map correctly if i connect my wear AVD to a Handled Device ?\u003c/p\u003e\n\n\u003cp\u003ethanks for your suggestions.\u003c/p\u003e","accepted_answer_id":"35382985","answer_count":"2","comment_count":"0","creation_date":"2015-10-27 22:28:34.41 UTC","favorite_count":"1","last_activity_date":"2016-02-13 17:28:50.49 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4043486","post_type_id":"1","score":"1","tags":"android|google-maps|android-wear|google-maps-api-2|android-wear-data-api","view_count":"300"} +{"id":"33379589","title":"map showing gray on an Android Wear Emulator","body":"\u003cp\u003ei am using Android Studio, i created a wear AVD with API 22, Activated debugging on the device an obtained the google map API key, i used the example provided by google for a simple Wear application using Google Maps API :\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://developers.google.com/maps/documentation/android-api/wear\" rel=\"nofollow\"\u003ehttps://developers.google.com/maps/documentation/android-api/wear\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003eproject's files are provided here 
:\u003c/p\u003e\n\n\u003cp\u003e\u003ca href=\"https://github.com/googlemaps/android-samples/tree/main/AndroidWearMap\" rel=\"nofollow\"\u003ehttps://github.com/googlemaps/android-samples/tree/main/AndroidWearMap\u003c/a\u003e\u003c/p\u003e\n\n\u003cp\u003ethe application start but show a gray map without changes.\u003c/p\u003e\n\n\u003cp\u003ei think it is a problem of connectivity since the wear AVD is not connected to the internet like a Handled AVD so it can't download the map files.\u003c/p\u003e\n\n\u003cp\u003ei didn't try to connect the wear AVD with my Android Phone because the Android version it is using is not compatible with the Android Wear application required to connect a wear to Handled devices and and it doesn\"t support higher versions.\u003c/p\u003e\n\n\u003cp\u003eis there a solution to let the wear AVD use my computer internet, or another way to use Google Maps on wear AVD ?\u003c/p\u003e\n\n\u003cp\u003ecan i show the map correctly if i connect my wear AVD to a Handled Device ?\u003c/p\u003e\n\n\u003cp\u003ethanks for your suggestions.\u003c/p\u003e","accepted_answer_id":"35382985","answer_count":"2","comment_count":"0","creation_date":"2015-10-27 22:28:34.41 UTC","favorite_count":"1","last_activity_date":"2016-02-13 17:28:50.49 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"4043486","post_type_id":"1","score":"1","tags":"android|google-maps|android-wear|google-maps-api-2|android-wear-data-api","view_count":"300"} {"id":"21241839","title":"Java code automated documentation /presentation","body":"\u003cp\u003eIs there a tool to create from custom Java code documentation similar to what you can find on \u003ca href=\"http://developer.android.com/reference/android/widget/ArrayAdapter.html\" rel=\"nofollow\"\u003ethis page\u003c/a\u003e?\u003c/p\u003e\n\n\u003cp\u003eBy this I mean listing in a clear way: methods of a class, different types of fields, inheritance, interfaces it implements etc. 
I want to do this for a library I created. I'm aware that all this information could be found inside the code, but sometimes you don't have time to open every single source file. Preferable html output.\u003c/p\u003e","answer_count":"1","comment_count":"0","creation_date":"2014-01-20 18:50:38.88 UTC","last_activity_date":"2014-01-20 23:25:58.107 UTC","last_edit_date":"2014-01-20 23:25:58.107 UTC","last_editor_display_name":"","last_editor_user_id":"321731","owner_display_name":"","owner_user_id":"3216352","post_type_id":"1","score":"0","tags":"java|documentation|creation","view_count":"51"} {"id":"35600476","title":"Securely transmitting password over TCP Socket in Java","body":"\u003cp\u003eI want to write a secure method of authentication over TCP sockets in Java. I've read reasonably extensively on the subject, but I am by no means an expert. There seems to be a bit of variance in opinion on how this is best done.\u003c/p\u003e\n\n\u003cp\u003eI know pretty well had to generate my password hashes, but that doesn't do me a whole lot of good if an attacker can simply glean the passwords/password hashes while in transit. So I want to create a reasonably secure method of sending this data across.\u003c/p\u003e\n\n\u003cp\u003eThe most popular method seems to be using SSL sockets. However, my code will not be used as a single server (as an online game might be) but instead will have many instances run by various consumers. I personally don't have a means to purchase an SSL certificate, and I can't really ask my consumers to do that either. Creating self-signed certificates and then installing them in a keystore for each client seems both difficult and a little insecure. If I were handling all server instances, this would be a much more viable option, since I would only need one certificate, but as it stands now \u003c/p\u003e\n\n\u003cp\u003eSo I worked on simply securing the TCP socket while performing log ins. 
I can do this pretty well using a Diffie-Hellman algorithm. However, as I research this, people kept saying to use SSL instead. \u003c/p\u003e\n\n\u003cp\u003eHow can I securely transmit passwords over Java TCP sockets in an efficient but portable manner?\u003c/p\u003e","answer_count":"2","comment_count":"5","creation_date":"2016-02-24 11:10:01.37 UTC","last_activity_date":"2016-02-24 11:44:42.903 UTC","last_editor_display_name":"","owner_display_name":"","owner_user_id":"2719960","post_type_id":"1","score":"0","tags":"java|sockets|security|tcp","view_count":"200"} {"id":"11120835","title":"Why QGraphicsScene::advance() doesn't repaint my item?","body":"\u003cp\u003eI'm trying to move a sprite in a QGraphicsView. I use :\u003c/p\u003e\n\n\u003cpre\u003e\u003ccode\u003econnect(timer, SIGNAL(timeout()), scene, SLOT(advance()));\ntimer-\u0026gt;start(1000/33);\n\u003c/code\u003e\u003c/pre\u003e\n\n\u003cp\u003eBut my sprite is not repainted. I've to do alt-tab to update the view.\u003c/p\u003e","accepted_answer_id":"11121026","answer_count":"2","comment_count":"0","creation_date":"2012-06-20 13:35:18.617 UTC","last_activity_date":"2012-12-20 10:02:19.91 UTC","last_edit_date":"2012-12-20 10:02:19.91 UTC","last_editor_display_name":"","last_editor_user_id":"851404","owner_display_name":"","owner_user_id":"1312748","post_type_id":"1","score":"1","tags":"qt|qgraphicsview","view_count":"1501"} diff --git a/test/integration/README.md b/test/integration/README.md index a6935f1fa..187efdcc6 100644 --- a/test/integration/README.md +++ b/test/integration/README.md @@ -5,8 +5,8 @@ Yes. ## Background -OpenSearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/opensearch-project/OpenSearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/api).
-To support different languages at the same time, the OpenSearch team decided to provide a [YAML specification](https://github.com/opensearch-project/OpenSearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.
+OpenSearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/api).
+To support different languages at the same time, the OpenSearch team decided to provide a [YAML specification](https://github.com/opensearch-project/OpenSearch/tree/main/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.
This testing suite uses that specification to generate the test for the specified version of OpenSearch on the fly. ## Run From 33acf4c0385193f6d1dd0bd348cdf78775038464 Mon Sep 17 00:00:00 2001 From: Bishoy Boktor Date: Fri, 20 Aug 2021 20:33:49 +0000 Subject: [PATCH 10/10] Minor OpenSearch reference Signed-off-by: Bishoy Boktor --- test/integration/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/README.md b/test/integration/README.md index 187efdcc6..97b4237d9 100644 --- a/test/integration/README.md +++ b/test/integration/README.md @@ -35,7 +35,7 @@ npm run test:integration -- --cov --coverage-report=html ## How does this thing work? At first sight, it might seem complicated, but once you understand what the moving parts are, it's quite easy. 1. Connects to the given OpenSearch instance -1. Gets the opensearch version and build hash +1. Gets the OpenSearch version and build hash 1. Checkout to the given hash (and clone the repository if it is not present) 1. Reads the folder list and for each folder the yaml file list 1. Starts running folder by folder every file