diff --git a/.ci/certs/ca.crt b/.ci/certs/ca.crt new file mode 100755 index 000000000..6402874d5 --- /dev/null +++ b/.ci/certs/ca.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDSTCCAjGgAwIBAgIUIwN+0zglsexRKwE1RGHvlCcmrdwwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTkwMjEzMDcyMjQwWhcNMjIwMjEyMDcyMjQwWjA0MTIwMAYD +VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANILs0JO0e7x29zeVx21qalK +XKdX+AMlGJPH75wWO/Jq6YHtxt1wYIg762krOBXfG6JsFSOIwIv5VrzGGRGjSPt9 +OXQyXrDDiQvsBT3rpzLNdDs7KMl2tZswwv7w9ujgud0cYnS1MOpn81rfPc73DvMg +xuhplofDx6fn3++PjVRU2FNiIVWyEoaxRjCeGPMBubKZYaYbQA6vYM4Z+ByG727B +AyAER3t7xmvYti/EoO2hv2HQk5zgcj/Oq3AJKhnt8LH8fnfm3TnYNM1htvXqhN05 +vsvhvm2PHfnA5qLlSr/3W0aI/U/PqfsFDCgyRV097sMIaKkmavb0Ue7aQ7lgtp0C +AwEAAaNTMFEwHQYDVR0OBBYEFDRKlCMowWR1rwxE0d1lTEQe5O71MB8GA1UdIwQY +MBaAFDRKlCMowWR1rwxE0d1lTEQe5O71MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI +hvcNAQELBQADggEBAKbCJ95EBpeuvF70KEt6QU70k/SH1NRvM9YzKryV0D975Jvu +HOSm9HgSTULeAUFZIa4oYyf3QUfVoI+2T/aQrfXA3gfrJWsHURkyNmiHOFAbYHqi +xA6i249G2GTEjc1+le/M2N2CcDKAmurW6vSGK4upXQbPd6KmnhHREX74zkWjnOa+ ++tibbSSOCT4Tmja2DbBxAPuivU9IB1g/hIUmbYQqKffQrBJA0658tz6w63a/Q7xN +pCvvbSgiMZ6qcVIcJkBT2IooYie+ax45pQECHthgIUcQAzfmIfqlU0Qfl8rDgAmn +0c1o6HQjKGU2aVGgSRuaaiHaSZjbPIZVS51sOoI= +-----END CERTIFICATE----- diff --git a/.ci/certs/testnode.crt b/.ci/certs/testnode.crt new file mode 100755 index 000000000..ff3bcb37f --- /dev/null +++ b/.ci/certs/testnode.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDIjCCAgqgAwIBAgIUI4QU6jA1dYSCbdIA6oAb2TBEluowDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTkwMjEzMDcyMzEzWhcNMjIwMjEyMDcyMzEzWjATMREwDwYD +VQQDEwhpbnN0YW5jZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJeT +yOy6EAScZxrULKjHePciiz38grivCrhFFV+dThaRCcl3DhDzb9Eny5q5iEw3WvLQ +Rqmf01jncNIhaocTt66VqveXaMubbE8O0LcG6e4kpFO+JtnVF8JTARTc+ux/1uD6 +hO1VG/HItM7WQrQxh4hfB2u1AX2YQtoqEtXXEC+UHWfl4QzuzXjBnKCkO/L9/6Tf +yNFQWXxKnIiTs8Xm9sEhhSCBJPlLTQu+MX4vR2Uwj5XZmflDUr+ZTenl9qYxL6b3 +SWhh/qEl4GAj1+tS7ZZOxE0237mUh3IIFYSWSaMm8K2m/BYHkLNWL5B1dMic0lsv +osSoYrQuCef4HQMCitsCAwEAAaNNMEswHQYDVR0OBBYEFFMg4l1GLW8lYbwASY+r +YeWYRzIiMB8GA1UdIwQYMBaAFDRKlCMowWR1rwxE0d1lTEQe5O71MAkGA1UdEwQC +MAAwDQYJKoZIhvcNAQELBQADggEBAEQrgh1xALpumQTzsjxFRGque/vlKTgRs5Kh +xtgapr6wjIbdq7dagee+4yNOKzS5lGVXCgwrJlHESv9qY0uumT/33vK2uduJ7NAd +fR2ZzyBnhMX+mkYhmGrGYCTUMUIwOIQYa4Evis4W+LHmCIDG03l7gLHfdIBe9VMO +pDZum8f6ng0MM49s8/rXODNYKw8kFyUhnfChqMi/2yggb1uUIfKlJJIchkgYjE13 +zuC+fjo029Pq1jeMIdxugLf/7I/8NiW1Yj9aCXevUXG1qzHFEuKAinBXYOZO/vWS +LaEqOhwrzNynwgGpYAr7Rfgv4AflltYIIav4PZT03P7fbyAAf8s= +-----END CERTIFICATE----- diff --git a/.ci/certs/testnode.key b/.ci/certs/testnode.key new file mode 100755 index 000000000..c35b4bc83 --- /dev/null +++ b/.ci/certs/testnode.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpQIBAAKCAQEAl5PI7LoQBJxnGtQsqMd49yKLPfyCuK8KuEUVX51OFpEJyXcO +EPNv0SfLmrmITDda8tBGqZ/TWOdw0iFqhxO3rpWq95doy5tsTw7Qtwbp7iSkU74m +2dUXwlMBFNz67H/W4PqE7VUb8ci0ztZCtDGHiF8Ha7UBfZhC2ioS1dcQL5QdZ+Xh +DO7NeMGcoKQ78v3/pN/I0VBZfEqciJOzxeb2wSGFIIEk+UtNC74xfi9HZTCPldmZ ++UNSv5lN6eX2pjEvpvdJaGH+oSXgYCPX61Ltlk7ETTbfuZSHcggVhJZJoybwrab8 +FgeQs1YvkHV0yJzSWy+ixKhitC4J5/gdAwKK2wIDAQABAoIBAQCRFTJna/xy/WUu +59FLR4qAOj8++JgCwACpue4oU7/vl6nffSYokWoAr2+RzG4qTX2vFi3cpA8+dGCn +sLZvTi8tWzKGxBTZdg2oakzaMzLr74SeZ052iCGyrZJGbvF6Ny7srr1XEXSq6+os +ZCb6pMHOhO7saBdiKMAsY8MdjTl/33AduuE6ztqv+L92xTr2g4QlbT1KvWlEgppU +k4Gy7zdETkPBTSH/17ZwyGJoJICIAhbL4IpmOM4dPIg8nFkVPPpy6p0z4uGjtgnK +nreZ2EKMzCafBaHn7A77gpi0OrQdl6pe0fsGqv/323YjCJPbwwl5TsoNq44DzwiX +3M7XiVJxAoGBAOCne56vdN4uZmCgLVGT2JSUNVPOu4bfjrxWH6cslzrPT2Zhp3lO +M4axZ3gmcervV252YEZXntXDHHCSfrECllRN1WFD63XmyQ/CkhuvZkkeRHfzL1TE +EdqHOTqs4sRETZ7+RITFC81DZQkWWOKeyXMjyPBqd7RnThQHijB1c8Y5AoGBAKy6 +CVKBx+zz5crVD0tz4UhOmz1wRNN0CL0l+FXRuFSgbzMIvwpfiqe25crgeLHe2M2/ +TogdWbjZ2nUZQTzoRsSkQ6cKHpj+G/gWurp/UcHHXFVwgLSPF7c3KHDtiYq7Vqw0 +bvmhM03LI6+ZIPRV7hLBr7WP7UmpAiREMF7tTnmzAoGBAIkx3w3WywFQxtblmyeB +qbd7F2IaE23XoxyjX+tBEQ4qQqwcoSE0v8TXHIBEwjceeX+NLVhn9ClJYVniLRq+ +oL3VVqVyzB4RleJZCc98e3PV1yyFx/b1Uo3pHOsXX9lKeTjKwV9v0rhFGzPEgP3M +yOvXA8TG0FnM6OLUg/D6GX0JAoGAMuHS4TVOGeV3ahr9mHKYiN5vKNgrzka+VEod +L9rJ/FQOrfADpyCiDen5I5ygsXU+VM3oanyK88NpcVlxOGoMft0M+OYoQVWKE7lO +ZKYhBX6fGqQ7pfUJPXXIOgwfmni5fZ0sm+j63g3bg10OsiumKGxaQJgXhL1+3gQg +Y7ZwibUCgYEAlZoFFvkMLjpOSaHk1z5ZZnt19X0QUIultBwkumSqMPm+Ks7+uDrx +thGUCoz4ecr/ci4bIUY7mB+zfAbqnBOMxreJqCRbAIuRypo1IlWkTp8DywoDOfMW +NfzjVmzJ7EJu44nGmVAi1jw4Pbseivvi1ujMCoPgaE8I1uSh144bwN8= +-----END RSA PRIVATE KEY----- diff --git a/.ci/docker-compose.yml b/.ci/docker-compose.yml index d8a3b6c38..ba8b0538e 100644 --- a/.ci/docker-compose.yml +++ b/.ci/docker-compose.yml @@ -1,6 +1,6 @@ version: '3.2' services: - client: + client-oss: image: docker.elastic.co/clients/elasticsearch-js:${NODE_JS_VERSION:-10} build: context: .. @@ -8,7 +8,7 @@ services: args: NODE_JS_VERSION: ${NODE_JS_VERSION:-10} environment: - - "TEST_ES_SERVER=http://elasticsearch:9200" + - "TEST_ES_SERVER=http://elasticsearch-oss:9200" volumes: - ..:/usr/src/app # This will mount the node_modules directory @@ -16,15 +16,17 @@ services: - /usr/src/app/node_modules - esvol:/tmp networks: - - esnet + - esnet-oss depends_on: - - elasticsearch - elasticsearch: + - elasticsearch-oss + + elasticsearch-oss: image: docker.elastic.co/elasticsearch/elasticsearch:${ELASTICSEARCH_VERSION:-7.0.0-beta1} + - elasticsearch volumes: - esvol:/tmp networks: - - esnet + - esnet-oss environment: - path.repo=/tmp - "repositories.url.allowed_urls=http://snapshot.*" @@ -32,7 +34,59 @@ services: - bootstrap.memory_lock=false - "discovery.type=single-node" - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + + client-platinum: + image: docker.elastic.co/clients/elasticsearch-js:${NODE_JS_VERSION:-10} + build: + context: .. + dockerfile: .ci/Dockerfile + args: + NODE_JS_VERSION: ${NODE_JS_VERSION:-10} + environment: + - "TEST_ES_SERVER=https://elastic:changeme@elasticsearch-platinum:9200" + volumes: + - ..:/usr/src/app + # This will mount the node_modules directory + # to the host machine using the buildtime directory. + - /usr/src/app/node_modules + - esvol:/tmp + networks: + - esnet-platinum + depends_on: + - elasticsearch-platinum + + elasticsearch-platinum: + image: docker.elastic.co/elasticsearch/elasticsearch:${ELASTICSEARCH_VERSION:-7.0.0-beta1} + ports: + - "9200:9200" + networks: + - esnet-platinum + environment: + - "node.attr.testattr=test" + - "path.repo=/tmp" + - "repositories.url.allowed_urls=http://snapshot.*" + - "discovery.type=single-node" + - "ES_JAVA_OPTS=-Xms1g -Xmx1g" + - "ELASTIC_PASSWORD=changeme" + - "xpack.security.enabled=true" + - "xpack.license.self_generated.type=trial" + - "xpack.security.http.ssl.enabled=true" + - "xpack.security.http.ssl.verification_mode=certificate" + - "xpack.security.http.ssl.key=certs/testnode.key" + - "xpack.security.http.ssl.certificate=certs/testnode.crt" + - "xpack.security.http.ssl.certificate_authorities=certs/ca.crt" + - "xpack.security.transport.ssl.enabled=true" + - "xpack.security.transport.ssl.key=certs/testnode.key" + - "xpack.security.transport.ssl.certificate=certs/testnode.crt" + - "xpack.security.transport.ssl.certificate_authorities=certs/ca.crt" + volumes: + - "./certs/testnode.crt:/usr/share/elasticsearch/config/certs/testnode.crt" + - "./certs/testnode.key:/usr/share/elasticsearch/config/certs/testnode.key" + - "./certs/ca.crt:/usr/share/elasticsearch/config/certs/ca.crt" + networks: - esnet: + # we need two networks otherwise the two ES instances will join each other + esnet-oss: + esnet-platinum: volumes: esvol: diff --git a/.ci/run-tests b/.ci/run-tests index e0deadb62..cccda74c3 100755 --- a/.ci/run-tests +++ b/.ci/run-tests @@ -10,4 +10,6 @@ # - $NODE_JS_VERSION # -ELASTICSEARCH_VERSION=${ELASTICSEARCH_VERSION} NODE_JS_VERSION=${NODE_JS_VERSION} docker-compose -f .ci/docker-compose.yml run client +ELASTICSEARCH_VERSION=${ELASTICSEARCH_VERSION} NODE_JS_VERSION=${NODE_JS_VERSION} docker-compose -f .ci/docker-compose.yml run client-oss + +ELASTICSEARCH_VERSION=${ELASTICSEARCH_VERSION} NODE_JS_VERSION=${NODE_JS_VERSION} docker-compose -f .ci/docker-compose.yml run client-platinum diff --git a/api/api/ml.delete_calendar_event.js b/api/api/ml.delete_calendar_event.js index 70fd61e2a..a8b7b8e4c 100644 --- a/api/api/ml.delete_calendar_event.js +++ b/api/api/ml.delete_calendar_event.js @@ -62,7 +62,7 @@ function buildMlDeleteCalendarEvent (opts) { } // check required url components - if ((params['event_id'] != null || params['eventId'] != null) && ((params['calendar_id'] == null || params['calendarId']))) { + if ((params['event_id'] != null || params['eventId'] != null) && ((params['calendar_id'] == null && params['calendarId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: calendar_id'), result diff --git a/api/api/ml.delete_calendar_job.js b/api/api/ml.delete_calendar_job.js index ff6315180..ea412b9b4 100644 --- a/api/api/ml.delete_calendar_job.js +++ b/api/api/ml.delete_calendar_job.js @@ -62,7 +62,7 @@ function buildMlDeleteCalendarJob (opts) { } // check required url components - if ((params['job_id'] != null || params['jobId'] != null) && ((params['calendar_id'] == null || params['calendarId']))) { + if ((params['job_id'] != null || params['jobId'] != null) && ((params['calendar_id'] == null && params['calendarId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: calendar_id'), result diff --git a/api/api/ml.delete_forecast.js b/api/api/ml.delete_forecast.js index b51697d73..d07337780 100644 --- a/api/api/ml.delete_forecast.js +++ b/api/api/ml.delete_forecast.js @@ -60,7 +60,7 @@ function buildMlDeleteForecast (opts) { } // check required url components - if ((params['forecast_id'] != null || params['forecastId'] != null) && ((params['job_id'] == null || params['jobId']))) { + if ((params['forecast_id'] != null || params['forecastId'] != null) && ((params['job_id'] == null && params['jobId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: job_id'), result diff --git a/api/api/ml.delete_model_snapshot.js b/api/api/ml.delete_model_snapshot.js index 0cf2877b2..e77f38f36 100644 --- a/api/api/ml.delete_model_snapshot.js +++ b/api/api/ml.delete_model_snapshot.js @@ -62,7 +62,7 @@ function buildMlDeleteModelSnapshot (opts) { } // check required url components - if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null || params['jobId']))) { + if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null && params['jobId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: job_id'), result diff --git a/api/api/ml.find_file_structure.js b/api/api/ml.find_file_structure.js index c5a554fb4..7463010ad 100644 --- a/api/api/ml.find_file_structure.js +++ b/api/api/ml.find_file_structure.js @@ -109,7 +109,7 @@ function buildMlFindFileStructure (opts) { const request = { method, path, - body: body || '', + bulkBody: body, querystring } diff --git a/api/api/ml.get_buckets.js b/api/api/ml.get_buckets.js index 2a2175b22..81223c004 100644 --- a/api/api/ml.get_buckets.js +++ b/api/api/ml.get_buckets.js @@ -70,7 +70,7 @@ function buildMlGetBuckets (opts) { } // check required url components - if (params['timestamp'] != null && ((params['job_id'] == null || params['jobId']))) { + if (params['timestamp'] != null && ((params['job_id'] == null && params['jobId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: job_id'), result diff --git a/api/api/ml.get_model_snapshots.js b/api/api/ml.get_model_snapshots.js index 7a56d6ab6..8c1d74dd1 100644 --- a/api/api/ml.get_model_snapshots.js +++ b/api/api/ml.get_model_snapshots.js @@ -62,7 +62,7 @@ function buildMlGetModelSnapshots (opts) { } // check required url components - if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null || params['jobId']))) { + if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null && params['jobId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: job_id'), result diff --git a/api/api/ml.put_calendar_job.js b/api/api/ml.put_calendar_job.js index 55b005f37..f3ad75956 100644 --- a/api/api/ml.put_calendar_job.js +++ b/api/api/ml.put_calendar_job.js @@ -62,7 +62,7 @@ function buildMlPutCalendarJob (opts) { } // check required url components - if ((params['job_id'] != null || params['jobId'] != null) && ((params['calendar_id'] == null || params['calendarId']))) { + if ((params['job_id'] != null || params['jobId'] != null) && ((params['calendar_id'] == null && params['calendarId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: calendar_id'), result diff --git a/api/api/ml.revert_model_snapshot.js b/api/api/ml.revert_model_snapshot.js index 1682f4012..6f34bb7d3 100644 --- a/api/api/ml.revert_model_snapshot.js +++ b/api/api/ml.revert_model_snapshot.js @@ -58,7 +58,7 @@ function buildMlRevertModelSnapshot (opts) { } // check required url components - if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null || params['jobId']))) { + if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null && params['jobId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: job_id'), result diff --git a/api/api/ml.update_model_snapshot.js b/api/api/ml.update_model_snapshot.js index 7e88a3dd0..c495099a1 100644 --- a/api/api/ml.update_model_snapshot.js +++ b/api/api/ml.update_model_snapshot.js @@ -63,7 +63,7 @@ function buildMlUpdateModelSnapshot (opts) { } // check required url components - if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null || params['jobId']))) { + if ((params['snapshot_id'] != null || params['snapshotId'] != null) && ((params['job_id'] == null && params['jobId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: job_id'), result diff --git a/api/api/security.get_privileges.js b/api/api/security.get_privileges.js index 5bd95cf1a..2e67a3e3a 100644 --- a/api/api/security.get_privileges.js +++ b/api/api/security.get_privileges.js @@ -80,7 +80,13 @@ function buildSecurityGetPrivileges (opts) { var path = '' - path = '/' + '_security' + '/' + 'privilege' + '/' + encodeURIComponent(application) + '/' + encodeURIComponent(name) + if (application && name) { + path = '/' + '_security' + '/' + 'privilege' + '/' + encodeURIComponent(application) + '/' + encodeURIComponent(name) + } else if (application) { + path = '/' + '_security' + '/' + 'privilege' + '/' + encodeURIComponent(application) + } else { + path = '/' + '_security' + '/' + 'privilege' + } // build request object const request = { diff --git a/api/api/xpack.watcher.ack_watch.js b/api/api/xpack.watcher.ack_watch.js index 01e663de3..3bd1233d9 100644 --- a/api/api/xpack.watcher.ack_watch.js +++ b/api/api/xpack.watcher.ack_watch.js @@ -56,7 +56,7 @@ function buildXpackWatcherAckWatch (opts) { } // check required url components - if ((params['action_id'] != null || params['actionId'] != null) && ((params['watch_id'] == null || params['watchId']))) { + if ((params['action_id'] != null || params['actionId'] != null) && ((params['watch_id'] == null && params['watchId'] == null))) { return callback( new ConfigurationError('Missing required parameter of the url: watch_id'), result diff --git a/lib/Transport.js b/lib/Transport.js index f16907bec..47a111110 100644 --- a/lib/Transport.js +++ b/lib/Transport.js @@ -106,7 +106,7 @@ class Transport { return callback(err, result) } } - headers['Content-Type'] = 'application/json' + headers['Content-Type'] = headers['Content-Type'] || 'application/json' if (compression === 'gzip') { if (isStream(params.body) === false) { @@ -131,7 +131,7 @@ class Transport { } else { params.body = params.bulkBody } - headers['Content-Type'] = 'application/x-ndjson' + headers['Content-Type'] = headers['Content-Type'] || 'application/x-ndjson' if (isStream(params.body) === false) { headers['Content-Length'] = '' + Buffer.byteLength(params.body) } diff --git a/package.json b/package.json index 081e3e0da..5f362c916 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,7 @@ "lint:fix": "standard --fix", "generate": "node scripts/run.js", "elasticsearch": "./scripts/es-docker.sh", - "ci": "./scripts/wait-cluster.sh && npm test && npm run test:integration" + "ci": "npm test && npm run test:integration" }, "author": { "name": "Tomas Della Vedova", diff --git a/scripts/es-docker-platinum.sh b/scripts/es-docker-platinum.sh new file mode 100755 index 000000000..8ab34962a --- /dev/null +++ b/scripts/es-docker-platinum.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +repo=$(pwd) +testnodecrt="/.ci/certs/testnode.crt" +testnodekey="/.ci/certs/testnode.key" +cacrt="/.ci/certs/ca.crt" + +exec docker run \ + --rm \ + -e "node.attr.testattr=test" \ + -e "path.repo=/tmp" \ + -e "repositories.url.allowed_urls=http://snapshot.*" \ + -e "discovery.type=single-node" \ + -e "ES_JAVA_OPTS=-Xms1g -Xmx1g" \ + -e "ELASTIC_PASSWORD=changeme" \ + -e "xpack.security.enabled=true" \ + -e "xpack.license.self_generated.type=trial" \ + -e "xpack.security.http.ssl.enabled=true" \ + -e "xpack.security.http.ssl.verification_mode=certificate" \ + -e "xpack.security.http.ssl.key=certs/testnode.key" \ + -e "xpack.security.http.ssl.certificate=certs/testnode.crt" \ + -e "xpack.security.http.ssl.certificate_authorities=certs/ca.crt" \ + -e "xpack.security.transport.ssl.enabled=true" \ + -e "xpack.security.transport.ssl.key=certs/testnode.key" \ + -e "xpack.security.transport.ssl.certificate=certs/testnode.crt" \ + -e "xpack.security.transport.ssl.certificate_authorities=certs/ca.crt" \ + -v "$repo$testnodecrt:/usr/share/elasticsearch/config/certs/testnode.crt" \ + -v "$repo$testnodekey:/usr/share/elasticsearch/config/certs/testnode.key" \ + -v "$repo$cacrt:/usr/share/elasticsearch/config/certs/ca.crt" \ + -p 9200:9200 \ + docker.elastic.co/elasticsearch/elasticsearch:7.0.0-beta1 + # docker.elastic.co/elasticsearch/elasticsearch:6.6.0 diff --git a/scripts/utils/generate.js b/scripts/utils/generate.js index 3ae60ef0f..0ee925ab6 100644 --- a/scripts/utils/generate.js +++ b/scripts/utils/generate.js @@ -34,6 +34,7 @@ const ndjsonApi = [ 'bulk', 'msearch', 'msearch_template', + 'ml.find_file_structure', 'monitoring.bulk' ] @@ -448,7 +449,7 @@ function genUrlValidation (paths, api) { if (chunks[i] === camelCased) { code += `params['${chunks[i]}'] == null${i === len - 1 ? '' : ' || '}` } else { - code += `(params['${chunks[i]}'] == null || params['${camelCased}'])${i === len - 1 ? '' : ' || '}` + code += `(params['${chunks[i]}'] == null && params['${camelCased}'] == null)${i === len - 1 ? '' : ' || '}` } } code += `)) { diff --git a/test/integration/helper.js b/test/integration/helper.js new file mode 100644 index 000000000..8d4479744 --- /dev/null +++ b/test/integration/helper.js @@ -0,0 +1,64 @@ +'use strict' + +const esDefaultRoles = [ + 'apm_system', + 'apm_user', + 'beats_admin', + 'beats_system', + 'code_admin', + 'code_user', + 'ingest_admin', + 'kibana_dashboard_only_user', + 'kibana_system', + 'kibana_user', + 'logstash_admin', + 'logstash_system', + 'machine_learning_admin', + 'machine_learning_user', + 'monitoring_user', + 'remote_monitoring_agent', + 'remote_monitoring_collector', + 'reporting_user', + 'rollup_admin', + 'rollup_user', + 'snapshot_user', + 'superuser', + 'transport_client', + 'watcher_admin', + 'watcher_user' +] + +const esDefaultUsers = [ + 'apm_system', + 'beats_system', + 'elastic', + 'logstash_system', + 'kibana', + 'remote_monitoring_user' +] + +function runInParallel (client, operation, options) { + if (options.length === 0) return Promise.resolve() + const operations = options.map(opts => { + const api = delve(client, operation).bind(client) + return api(opts) + }) + + return Promise.all(operations) +} + +// code from https://github.com/developit/dlv +// needed to support an edge case: `a\.b` +// where `a.b` is a single field: { 'a.b': true } +function delve (obj, key, def, p) { + p = 0 + // handle the key with a dot inside that is not a part of the path + // and removes the backslashes from the key + key = key.split + ? key.split(/(? k.replace(/\\/g, '')) + : key.replace(/\\/g, '') + while (obj && p < key.length) obj = obj[key[p++]] + return (obj === undefined || p < key.length) ? def : obj +} + +module.exports = { runInParallel, esDefaultRoles, esDefaultUsers, delve } diff --git a/test/integration/index.js b/test/integration/index.js index e3625a8f6..5f136413e 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -6,7 +6,6 @@ const { join, sep } = require('path') const yaml = require('js-yaml') const Git = require('simple-git') const ora = require('ora') -const minimist = require('minimist') const tap = require('tap') const { Client } = require('../../index') const TestRunner = require('./test-runner') @@ -14,7 +13,7 @@ const TestRunner = require('./test-runner') const esRepo = 'https://github.com/elastic/elasticsearch.git' const esFolder = join(__dirname, '..', '..', 'elasticsearch') const yamlFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'test') -// const xPackYamlFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'test') +const xPackYamlFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'test') const customSkips = [ // skipping because we are booting ES with `discovery.type=single-node` // and this test will fail because of this configuration @@ -23,6 +22,31 @@ const customSkips = [ // which triggers a retry and the node to be marked as dead 'search.aggregation/240_max_buckets.yml' ] +const platinumBlackList = { + // file path: test name + 'cat.aliases/10_basic.yml': 'Empty cluster', + 'index/10_with_id.yml': 'Index with ID', + 'indices.get_alias/10_basic.yml': 'Get alias against closed indices', + 'indices.get_alias/20_empty.yml': 'Check empty aliases when getting all aliases via /_alias', + // https://github.com/elastic/elasticsearch/pull/39400 + 'ml/jobs_crud.yml': 'Test put job with id that is already taken', + // TODO: investigate why this is failing + 'monitoring/bulk/10_basic.yml': '*', + 'monitoring/bulk/20_privileges.yml': '*', + 'license/20_put_license.yml': '*', + 'snapshot/10_basic.yml': '*', + // the body is correct, but the regex is failing + 'sql/sql.yml': 'Getting textual representation', + // we are setting two certificates in the docker config + 'ssl/10_basic.yml': '*', + // docker issue? + 'watcher/execute_watch/60_http_input.yml': '*', + // the checks are correct, but for some reason the test is failing on js side + // I bet is because the backslashes in the rg + 'watcher/execute_watch/70_invalid.yml': '*', + 'watcher/put_watch/10_basic.yml': '*', + 'xpack/15_basic.yml': '*' +} function Runner (opts) { if (!(this instanceof Runner)) { @@ -32,14 +56,39 @@ function Runner (opts) { assert(opts.node, 'Missing base node') this.bailout = opts.bailout - this.client = new Client({ node: opts.node }) + const options = { node: opts.node } + if (opts.isPlatinum) { + options.ssl = { + // NOTE: this path works only if we run + // the suite with npm scripts + ca: readFileSync('.ci/certs/ca.crt', 'utf8'), + rejectUnauthorized: false + } + } + this.client = new Client(options) this.log = ora('Loading yaml suite').start() } +Runner.prototype.waitCluster = function (callback, times = 0) { + this.log.text = 'Waiting for ElasticSearch' + this.client.cluster.health( + { waitForStatus: 'green', timeout: '50s' }, + (err, res) => { + if (++times < 10) { + setTimeout(() => { + this.waitCluster(callback, times) + }, 5000) + } else { + callback(err) + } + } + ) +} + /** * Runs the test suite */ -Runner.prototype.start = function () { +Runner.prototype.start = function (opts) { const parse = this.parse.bind(this) const client = this.client @@ -53,36 +102,36 @@ Runner.prototype.start = function () { // console.log() // }) - // Get the build hash of Elasticsearch - client.info((err, { body }) => { + this.waitCluster(err => { if (err) { this.log.fail(err.message) process.exit(1) } - const { number: version, build_hash: sha } = body.version + // Get the build hash of Elasticsearch + client.info((err, { body }) => { + if (err) { + this.log.fail(err.message) + process.exit(1) + } + const { number: version, build_hash: sha } = body.version - // Set the repository to the given sha and run the test suite - this.withSHA(sha, () => { - this.log.succeed('Done!') - runTest.call(this, version) + // Set the repository to the given sha and run the test suite + this.withSHA(sha, () => { + this.log.succeed(`Testing ${opts.isPlatinum ? 'platinum' : 'oss'} api...`) + runTest.call(this, version) + }) }) - - // client.xpack.license.postStartTrial({ acknowledge: true }, (err, { body }) => { - // if (err) { - // this.log.fail(err.message) - // return - // } - // }) }) function runTest (version) { const files = [] .concat(getAllFiles(yamlFolder)) - // .concat(getAllFiles(xPackYamlFolder)) + .concat(opts.isPlatinum ? getAllFiles(xPackYamlFolder) : []) .filter(t => !/(README|TODO)/g.test(t)) files.forEach(runTestFile.bind(this)) function runTestFile (file) { + // if (!file.endsWith('watcher/execute_watch/70_invalid.yml')) return for (var i = 0; i < customSkips.length; i++) { if (file.endsWith(customSkips[i])) return } @@ -94,7 +143,7 @@ Runner.prototype.start = function () { // every document is separated by '---', so we split on the separator // and then we remove the empty strings, finally we parse them const tests = data - .split('---') + .split('\n---\n') .map(s => s.trim()) .filter(Boolean) .map(parse) @@ -111,9 +160,26 @@ Runner.prototype.start = function () { tests.forEach(test => { const name = Object.keys(test)[0] if (name === 'setup' || name === 'teardown') return + // should skip the test inside `platinumBlackList` + // if we are testing the platinum apis + if (opts.isPlatinum) { + const list = Object.keys(platinumBlackList) + for (i = 0; i < list.length; i++) { + if (file.endsWith(list[i]) && (name === platinumBlackList[list[i]] || platinumBlackList[list[i]] === '*')) { + const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name + tap.skip(`Skipping test ${testName} because is blacklisted in the platinum test`) + return + } + } + } // create a subtest for the specific folder + test file + test name tap1.test(name, { jobs: 1, bail: this.bailout }, tap2 => { - const testRunner = TestRunner({ client, version, tap: tap2 }) + const testRunner = TestRunner({ + client, + version, + tap: tap2, + isPlatinum: file.includes('x-pack') + }) testRunner.run(setupTest, test[name], teardownTest, () => tap2.end()) }) }) @@ -245,19 +311,13 @@ Runner.prototype.createFolder = function (name) { } if (require.main === module) { - const opts = minimist(process.argv.slice(2), { - string: ['node', 'version'], - boolean: ['bailout'], - default: { - // node: 'http://elastic:passw0rd@localhost:9200', - node: process.env.TEST_ES_SERVER || 'http://localhost:9200', - version: '7.0', - bailout: false - } - }) - + const url = process.env.TEST_ES_SERVER || 'http://localhost:9200' + const opts = { + node: url, + isPlatinum: url.indexOf('@') > -1 + } const runner = Runner(opts) - runner.start() + runner.start(opts) } const getAllFiles = dir => diff --git a/test/integration/test-runner.js b/test/integration/test-runner.js index 2f8fa0b97..231fcd803 100644 --- a/test/integration/test-runner.js +++ b/test/integration/test-runner.js @@ -3,15 +3,20 @@ const t = require('tap') const semver = require('semver') const workq = require('workq') +const helper = require('./helper') const { ConfigurationError } = require('../../lib/errors') +const { delve } = helper + const supportedFeatures = [ 'gtelte', 'regex', 'benchmark', 'stash_in_path', 'groovy_scripting', - 'headers' + 'headers', + 'transform_and_set', + 'catch_unauthorized' ] function TestRunner (opts) { @@ -25,6 +30,7 @@ function TestRunner (opts) { this.response = null this.stash = new Map() this.tap = opts.tap || t + this.isPlatinum = opts.isPlatinum this.q = opts.q || workq() } @@ -55,14 +61,148 @@ TestRunner.prototype.cleanup = function (q, done) { q.add((q, done) => { this.client.snapshot.delete({ repository: '*', snapshot: '*' }, { ignore: 404 }, err => { - this.tap.error(err, 'should not error:snapshot.delete') + this.tap.error(err, 'should not error: snapshot.delete') done() }) }) q.add((q, done) => { this.client.snapshot.deleteRepository({ repository: '*' }, { ignore: 404 }, err => { - this.tap.error(err, 'should not error:snapshot.deleteRepository') + this.tap.error(err, 'should not error: snapshot.deleteRepository') + done() + }) + }) + + done() +} + +/** + * Runs some additional API calls to prepare ES for the Platinum test, + * This set of calls should be executed before the final clenup. + * @param {queue} + * @param {function} done + */ +TestRunner.prototype.cleanupPlatinum = function (q, done) { + this.tap.comment('Platinum Cleanup') + + q.add((q, done) => { + this.client.security.getRole((err, { body }) => { + this.tap.error(err, 'should not error: security.getRole') + const roles = Object.keys(body).filter(n => helper.esDefaultRoles.indexOf(n) === -1) + helper.runInParallel( + this.client, 'security.deleteRole', + roles.map(r => ({ name: r, refresh: 'wait_for' })) + ) + .then(() => done()) + .catch(err => this.tap.error(err, 'should not error: security.deleteRole')) + }) + }) + + q.add((q, done) => { + this.client.security.getUser((err, { body }) => { + this.tap.error(err, 'should not error: security.getUser') + const users = Object.keys(body).filter(n => helper.esDefaultUsers.indexOf(n) === -1) + helper.runInParallel( + this.client, 'security.deleteUser', + users.map(r => ({ username: r, refresh: 'wait_for' })) + ) + .then(() => done()) + .catch(err => this.tap.error(err, 'should not error: security.deleteUser')) + }) + }) + + q.add((q, done) => { + this.client.security.getPrivileges((err, { body }) => { + this.tap.error(err, 'should not error: security.getPrivileges') + const privileges = [] + Object.keys(body).forEach(app => { + Object.keys(body[app]).forEach(priv => { + privileges.push({ + name: body[app][priv].name, + application: body[app][priv].application, + refresh: 'wait_for' + }) + }) + }) + helper.runInParallel(this.client, 'security.deletePrivileges', privileges) + .then(() => done()) + .catch(err => this.tap.error(err, 'should not error: security.deletePrivileges')) + }) + }) + + q.add((q, done) => { + this.client.ml.stopDatafeed({ datafeedId: '*', force: true }, err => { + this.tap.error(err, 'should not error: ml.stopDatafeed') + this.client.ml.getDatafeeds({ datafeedId: '*' }, (err, { body }) => { + this.tap.error(err, 'should error: not ml.getDatafeeds') + const feeds = body.datafeeds.map(f => f.datafeed_id) + helper.runInParallel( + this.client, 'ml.deleteDatafeed', + feeds.map(f => ({ datafeedId: f })) + ) + .then(() => done()) + .catch(err => this.tap.error(err, 'should not error: ml.deleteDatafeed')) + }) + }) + }) + + q.add((q, done) => { + this.client.ml.closeJob({ jobId: '*', force: true }, err => { + this.tap.error(err, 'should not error: ml.closeJob') + this.client.ml.getJobs({ jobId: '*' }, (err, { body }) => { + this.tap.error(err, 'should not error: ml.getJobs') + const jobs = body.jobs.map(j => j.job_id) + helper.runInParallel( + this.client, 'ml.deleteJob', + jobs.map(j => ({ jobId: j, waitForCompletion: true, force: true })) + ) + .then(() => done()) + .catch(err => this.tap.error(err, 'should not error: ml.deleteJob')) + }) + }) + }) + + q.add((q, done) => { + this.client.xpack.rollup.getJobs({ id: '_all' }, (err, { body }) => { + this.tap.error(err, 'should not error: rollup.getJobs') + const jobs = body.jobs.map(j => j.config.id) + helper.runInParallel( + this.client, 'xpack.rollup.stopJob', + jobs.map(j => ({ id: j, waitForCompletion: true })) + ) + .then(() => helper.runInParallel( + this.client, 'xpack.rollup.deleteJob', + jobs.map(j => ({ id: j })) + )) + .then(() => done()) + .catch(err => this.tap.error(err, 'should not error: rollup.stopJob/deleteJob')) + }) + }) + + q.add((q, done) => { + this.client.tasks.list((err, { body }) => { + this.tap.error(err, 'should not error: tasks.list') + const tasks = Object.keys(body.nodes) + .reduce((acc, node) => { + const { tasks } = body.nodes[node] + Object.keys(tasks).forEach(id => { + if (tasks[id].cancellable) acc.push(id) + }) + return acc + }, []) + + helper.runInParallel( + this.client, 'tasks.cancel', + tasks.map(id => ({ taskId: id })) + ) + .then(() => done()) + .catch(err => this.tap.error(err, 'should not error: tasks.cancel')) + }) + }) + + q.add((q, done) => { + this.client.indices.delete({ index: '.ml-*' }, { ignore: 404 }, err => { + this.tap.error(err, 'should not error: indices.delete (ml indices)') done() }) }) @@ -77,7 +217,7 @@ TestRunner.prototype.cleanup = function (q, done) { * - the actual test * - teardown * - cleanup - * Internally uses a queue to guarantee the order of the test sections. +* Internally uses a queue to guarantee the order of the test sections. * @param {object} setup (null if not needed) * @param {object} test * @oaram {object} teardown (null if not needed) @@ -92,6 +232,20 @@ TestRunner.prototype.run = function (setup, test, teardown, end) { return end() } + if (this.isPlatinum) { + this.tap.comment('Creating x-pack user') + // Some platinum test requires this user + this.q.add((q, done) => { + this.client.security.putUser({ + username: 'x_pack_rest_user', + body: { password: 'x-pack-test-password', roles: ['superuser'] } + }, (err, { body }) => { + this.tap.error(err, 'should not error: security.putUser') + done() + }) + }) + } + if (setup) { this.q.add((q, done) => { this.exec('Setup', setup, q, done) @@ -108,6 +262,12 @@ TestRunner.prototype.run = function (setup, test, teardown, end) { }) } + if (this.isPlatinum) { + this.q.add((q, done) => { + this.cleanupPlatinum(q, done) + }) + } + this.q.add((q, done) => { this.cleanup(q, done) }) @@ -211,11 +371,33 @@ TestRunner.prototype.fillStashedValues = function (obj) { // iterate every key of the object for (const key in obj) { const val = obj[key] + // if the key value is a string, and the string includes '${' + // that we must update the content of '${...}'. + // eg: 'Basic ${auth}' we search the stahed value 'auth' + // and the resulting value will be 'Basic valueOfAuth' + if (typeof val === 'string' && val.includes('${')) { + const start = val.indexOf('${') + const end = val.indexOf('}', val.indexOf('${')) + const stashedKey = val.slice(start + 2, end) + const stashed = this.stash.get(stashedKey) + obj[key] = val.slice(0, start) + stashed + val.slice(end + 1) + continue + } + // handle json strings, eg: '{"hello":"$world"}' + if (typeof val === 'string' && val.includes('"$')) { + const start = val.indexOf('"$') + const end = val.indexOf('"', start + 1) + const stashedKey = val.slice(start + 2, end) + const stashed = '"' + this.stash.get(stashedKey) + '"' + obj[key] = val.slice(0, start) + stashed + val.slice(end + 1) + continue + } // if the key value is a string, and the string includes '$' // we run the "update value" code if (typeof val === 'string' && val.includes('$')) { // update the key value obj[key] = getStashedValues.call(this, val) + continue } // go deep in the object @@ -261,6 +443,26 @@ TestRunner.prototype.set = function (key, name) { return this } +/** + * Applies a given transformation and stashes the result. + * @param {string} the name to identify the stashed value + * @param {string} the transformation function as string + * @returns {TestRunner} + */ +TestRunner.prototype.transform_and_set = function (name, transform) { + if (/base64EncodeCredentials/.test(transform)) { + const [user, password] = transform + .slice(transform.indexOf('(') + 1, -1) + .replace(/ /g, '') + .split(',') + const userAndPassword = `${delve(this.response, user)}:${delve(this.response, password)}` + this.stash.set(name, Buffer.from(userAndPassword).toString('base64')) + } else { + throw new Error(`Unknown transform: '${transform}'`) + } + return this +} + /** * Runs a client command * @param {object} the action to perform @@ -353,7 +555,15 @@ TestRunner.prototype.exec = function (name, actions, q, done) { if (action.set) { q.add((q, done) => { const key = Object.keys(action.set)[0] - this.set(key, action.set[key]) + this.set(this.fillStashedValues(key), action.set[key]) + done() + }) + } + + if (action.transform_and_set) { + q.add((q, done) => { + const key = Object.keys(action.transform_and_set)[0] + this.transform_and_set(key, action.transform_and_set[key]) done() }) } @@ -423,8 +633,12 @@ TestRunner.prototype.exec = function (name, actions, q, done) { q.add((q, done) => { const key = Object.keys(action.length)[0] this.length( - delve(this.response, this.fillStashedValues(key)), - this.fillStashedValues(action.length)[key] + key === '$body' || key === '' + ? this.response + : delve(this.response, this.fillStashedValues(key)), + key === '$body' + ? action.length[key] + : this.fillStashedValues(action.length)[key] ) done() }) @@ -694,20 +908,6 @@ function getSkip (arr) { return null } -// code from https://github.com/developit/dlv -// needed to support an edge case: `a\.b` -// where `a.b` is a single field: { 'a.b': true } -function delve (obj, key, def, p) { - p = 0 - // handle the key with a dot inside that is not a part of the path - // and removes the backslashes from the key - key = key.split - ? key.split(/(? k.replace(/\\/g, '')) - : key.replace(/\\/g, '') - while (obj && p < key.length) obj = obj[key[p++]] - return (obj === undefined || p < key.length) ? def : obj -} - // Gets two *maybe* numbers and returns two valida numbers // it throws if one or both are not a valid number // the returned value is an array with the new values