Compare commits
42 Commits
backport-2
...
dsl
| Author | SHA1 | Date | |
|---|---|---|---|
| 6038d01f45 | |||
| e01055638e | |||
| 7d605485b2 | |||
| e932366caf | |||
| 70062550db | |||
| 3503c2d283 | |||
| cf2c0c3297 | |||
| 459e0b0cf9 | |||
| e5fa567a8c | |||
| 98dafc0b63 | |||
| c2b485f53a | |||
| 9402cf3bdc | |||
| 67e90b931a | |||
| a565230e5b | |||
| 0dadd9d057 | |||
| 126199a5ea | |||
| 48178df02e | |||
| 8a87e454a4 | |||
| 471d0a5563 | |||
| 96110ea948 | |||
| d3aeef251b | |||
| 2a14d80511 | |||
| 06b008099b | |||
| 8fce294e93 | |||
| b104ce42f5 | |||
| ecccaf023e | |||
| 917cb534b1 | |||
| 7f0e56b444 | |||
| c82ac4f5aa | |||
| ea582dd231 | |||
| 95df8ebc7d | |||
| 3903f5268b | |||
| 416d3e0851 | |||
| 714d38541e | |||
| 719fdcd99e | |||
| 041a5f4763 | |||
| ec29379e5d | |||
| f39016530d | |||
| 6eff70a47d | |||
| 3621e32dac | |||
| a702929d7f | |||
| 9a8254f9b5 |
25
.github/workflows/nodejs.yml
vendored
25
.github/workflows/nodejs.yml
vendored
@ -40,6 +40,31 @@ jobs:
|
||||
run: |
|
||||
npm run test:types
|
||||
|
||||
dsl:
|
||||
name: DSL
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [12.x, 14.x]
|
||||
os: [ubuntu-latest, windows-latest, macOS-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
npm install
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
npm run test:dsl
|
||||
|
||||
test-node-v8:
|
||||
name: Test
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@ -56,3 +56,5 @@ elasticsearch*
|
||||
test/benchmarks/macro/fixtures/*
|
||||
|
||||
*-junit.xml
|
||||
|
||||
dsl/lib
|
||||
|
||||
@ -70,3 +70,7 @@ certs
|
||||
.github
|
||||
CODE_OF_CONDUCT.md
|
||||
CONTRIBUTING.md
|
||||
|
||||
dsl/src
|
||||
dsl/examples
|
||||
dsl/tsconfig.json
|
||||
|
||||
12853
dsl/es-types.d.ts
vendored
Normal file
12853
dsl/es-types.d.ts
vendored
Normal file
File diff suppressed because it is too large
Load Diff
17
dsl/examples/README.md
Normal file
17
dsl/examples/README.md
Normal file
@ -0,0 +1,17 @@
|
||||
# Examples
|
||||
|
||||
In this folder you will find different examples to show the usage of the DSL.
|
||||
|
||||
## Instructions
|
||||
Before running any of the examples in this folder you should run `npm install` to install all the required dependencies and then run the `loadRepo` script.
|
||||
|
||||
## Run an example
|
||||
Running an example is very easy, you just need to run the following command:
|
||||
```sh
|
||||
npm run example examples/<filename>
|
||||
```
|
||||
|
||||
For example:
|
||||
```sh
|
||||
npm run example examples/last-commits.ts
|
||||
```
|
||||
62
dsl/examples/boolean-logic.ts
Normal file
62
dsl/examples/boolean-logic.ts
Normal file
@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure function API
|
||||
*/
|
||||
async function run1 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// define the query clauses
|
||||
const fixDescription = Q.must(Q.match('description', 'fix'))
|
||||
const files = Q.should(Q.term('files', 'test'), Q.term('files', 'docs'))
|
||||
const author = Q.filter(Q.term('author.name', 'delvedor'))
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
// use the boolean utilities to craft the final query
|
||||
body: Q(Q.and(fixDescription, files, author))
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
async function run2 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// define the query clauses
|
||||
const fixDescription = F().must(F().match('description', 'fix'))
|
||||
const files = F().should(F().term('files', 'test').term('files', 'docs'))
|
||||
const author = F().filter(F().term('author.name', 'delvedor'))
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
// use the boolean utilities to craft the final query
|
||||
body: F().and(fixDescription, files, author)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run1().catch(console.log)
|
||||
run2().catch(console.log)
|
||||
120
dsl/examples/compile-query.ts
Normal file
120
dsl/examples/compile-query.ts
Normal file
@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure functions API
|
||||
*/
|
||||
{
|
||||
// You can compile a query if you need to get
|
||||
// the best performances out of your code.
|
||||
// The query crafting and compilation should be done
|
||||
// outside of your hot code path.
|
||||
// First of all you should create your query almost
|
||||
// in the same way as you were doing before, the only
|
||||
// difference is that all the parameters you are passing
|
||||
// now should be updated with the `Q.param` API.
|
||||
// The only parameter of `Q.param` is the name of the parameter
|
||||
// that you were passing before.
|
||||
const query = Q(
|
||||
Q.match('description', Q.param('description')),
|
||||
Q.filter(
|
||||
Q.term('author.name', Q.param('author'))
|
||||
),
|
||||
Q.size(10)
|
||||
)
|
||||
|
||||
// Afterwards, you can create an interface that represents
|
||||
// the input object of the compiled query. The input object
|
||||
// contains all the parameters you were passing before, the
|
||||
// keys are the same you have passed to the various `Q.param`
|
||||
// invocations before. It defaults to `unknown`.
|
||||
interface Input {
|
||||
description: string
|
||||
author: string
|
||||
}
|
||||
// In this example we will use `Q.compile`, the returned function
|
||||
// works in the same way as `Q.compileUnsafe` but the function returned by the
|
||||
// safe API is an order of magnitude slower.
|
||||
// `Q.compile` can be used with untrusted input (but it's not recommended).
|
||||
// Once you have created the query and the input interface,
|
||||
// you must pass the query to `Q.compile` and store the result
|
||||
// in a variable. `Q.compile` returns a function that accepts
|
||||
// a single object parameter, which is the same you have declared
|
||||
// in the interface before.
|
||||
const compiledQuery = Q.compile<Input>(query)
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
// Finally, you call the function inside your hot code path,
|
||||
// the returned value will be the query.
|
||||
body: compiledQuery({
|
||||
description: 'fix',
|
||||
author: 'delvedor'
|
||||
})
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
{
|
||||
// The theory behind query compilation is the same here,
|
||||
// the query crafting and compilation should be done
|
||||
// outside of your hot code path.
|
||||
const query = F()
|
||||
.match('description', Q.param('description'))
|
||||
.filter(
|
||||
F().term('author.name', Q.param('author'))
|
||||
)
|
||||
.size(10)
|
||||
|
||||
interface Input {
|
||||
description: string
|
||||
author: string
|
||||
}
|
||||
|
||||
const compiledQuery = query.compile<Input>()
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: compiledQuery({
|
||||
description: 'fix',
|
||||
author: 'delvedor'
|
||||
})
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
}
|
||||
120
dsl/examples/compile-unsafe-query.ts
Normal file
120
dsl/examples/compile-unsafe-query.ts
Normal file
@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure functions API
|
||||
*/
|
||||
{
|
||||
// You can compile a query if you need to get
|
||||
// the best performances out of your code.
|
||||
// The query crafting and compilation should be done
|
||||
// outside of your hot code path.
|
||||
// First of all you should create your query almost
|
||||
// in the same way as you were doing before, the only
|
||||
// difference is that all the parameters you are passing
|
||||
// now should be updated with the `Q.param` API.
|
||||
// The only parameter of `Q.param` is the name of the parameter
|
||||
// that you were passing before.
|
||||
const query = Q(
|
||||
Q.match('description', Q.param('description')),
|
||||
Q.filter(
|
||||
Q.term('author.name', Q.param('author'))
|
||||
),
|
||||
Q.size(10)
|
||||
)
|
||||
|
||||
// Afterwards, you can create an interface that represents
|
||||
// the input object of the compiled query. The input object
|
||||
// contains all the parameters you were passing before, the
|
||||
// keys are the same you have passed to the various `Q.param`
|
||||
// invocations before. It defaults to `unknown`.
|
||||
interface Input {
|
||||
description: string
|
||||
author: string
|
||||
}
|
||||
// In this example we will use `Q.compileUnsafe`, the returned function
|
||||
// works in the same way as `Q.compile` but the function returned by the
|
||||
// unsafe API is an order of magnitude faster.
|
||||
// You should NEVER use `Q.compileUnsafe` with untrusted input.
|
||||
// Once you have created the query and the input interface,
|
||||
// you must pass the query to `Q.compileUnsafe` and store the result
|
||||
// in a variable. `Q.compile` returns a function that accepts
|
||||
// a single object parameter, which is the same you have declared
|
||||
// in the interface before.
|
||||
const compiledQuery = Q.compileUnsafe<Input>(query)
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
// Finally, you call the function inside your hot code path,
|
||||
// the returned value will be the query.
|
||||
body: compiledQuery({
|
||||
description: 'fix',
|
||||
author: 'delvedor'
|
||||
})
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
{
|
||||
// The theory behind query compilation is the same here,
|
||||
// the query crafting and compilation should be done
|
||||
// outside of your hot code path.
|
||||
const query = F()
|
||||
.match('description', Q.param('description'))
|
||||
.filter(
|
||||
F().term('author.name', Q.param('author'))
|
||||
)
|
||||
.size(10)
|
||||
|
||||
interface Input {
|
||||
description: string
|
||||
author: string
|
||||
}
|
||||
|
||||
const compiledQuery = query.compileUnsafe<Input>()
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: compiledQuery({
|
||||
description: 'fix',
|
||||
author: 'delvedor'
|
||||
})
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
}
|
||||
73
dsl/examples/day-most-commits.ts
Normal file
73
dsl/examples/day-most-commits.ts
Normal file
@ -0,0 +1,73 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure function API
|
||||
*/
|
||||
async function run1 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// get the day where the most commits were made
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.size(0),
|
||||
// 'day_most_commits' is the name of the aggregation
|
||||
A(A.day_most_commits.dateHistogram({
|
||||
field: 'committed_date',
|
||||
interval: 'day',
|
||||
min_doc_count: 1,
|
||||
order: { _count: 'desc' }
|
||||
}))
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
async function run2 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// get the day where the most commits were made
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: F()
|
||||
.size(0)
|
||||
// 'day_most_commits' is the name of the aggregation
|
||||
.aggs(
|
||||
A.day_most_commits.dateHistogram({
|
||||
field: 'committed_date',
|
||||
interval: 'day',
|
||||
min_doc_count: 1,
|
||||
order: { _count: 'desc' }
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
run1().catch(console.log)
|
||||
run2().catch(console.log)
|
||||
47
dsl/examples/extend-aggregations.ts
Normal file
47
dsl/examples/extend-aggregations.ts
Normal file
@ -0,0 +1,47 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// 'committers' is the name of the aggregation
|
||||
let committersAgg = A.committers.terms('committer.name.keyword')
|
||||
// instead of passing other aggregations as parameters
|
||||
// to the parent aggregation, you can conditionally add them
|
||||
if (Math.random() >= 0.5) {
|
||||
committersAgg = A.committers.aggs(
|
||||
committersAgg, A.line_stats.stats('stat.insertions')
|
||||
)
|
||||
}
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.size(0),
|
||||
A(committersAgg)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
66
dsl/examples/extend-queries.ts
Normal file
66
dsl/examples/extend-queries.ts
Normal file
@ -0,0 +1,66 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// the result must be fixes done by delvedor
|
||||
let query = Q.bool(
|
||||
Q.must(Q.match('description', 'fix')),
|
||||
Q.filter(Q.term('author.name', 'delvedor'))
|
||||
)
|
||||
|
||||
// Based on a condition, we want to enrich our query
|
||||
if (Math.random() >= 0.5) {
|
||||
// the results must be fixes done by delvedor
|
||||
// on test or docs files
|
||||
const should = Q.should(
|
||||
Q.term('files', 'test'),
|
||||
Q.term('files', 'docs')
|
||||
)
|
||||
// The code below produces the same as the one above
|
||||
// If you need to check multiple values for the same key,
|
||||
// you can pass an array of strings instead of calling
|
||||
// the query function multiple times
|
||||
// ```
|
||||
// const should = Q.should(
|
||||
// Q.term('files', ['test', 'docs'])
|
||||
// )
|
||||
// ```
|
||||
query = Q.and(query, should)
|
||||
} else {
|
||||
// the results must be fixes or features done by delvedor
|
||||
const must = Q.must(
|
||||
Q.match('description', 'feature')
|
||||
)
|
||||
query = Q.or(query, must)
|
||||
}
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(query)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
63
dsl/examples/fix-commit.ts
Normal file
63
dsl/examples/fix-commit.ts
Normal file
@ -0,0 +1,63 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure function API
|
||||
*/
|
||||
async function run1 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// search commits that contain 'fix' but do not change test files
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
// You can avoid to call `Q.must`, as any query will be
|
||||
// sent inside a `must` block unless specified otherwise
|
||||
Q.match('description', 'fix'),
|
||||
Q.mustNot(Q.term('files', 'test'))
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
async function run2 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// search commits that contain 'fix' but do not change test files
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: F()
|
||||
// You can avoid to call `.must`, as any query will be
|
||||
// sent inside a `must` block unless specified otherwise
|
||||
.match('description', 'fix')
|
||||
.mustNot(F().term('files', 'test'))
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run1().catch(console.log)
|
||||
run2().catch(console.log)
|
||||
61
dsl/examples/last-commits.ts
Normal file
61
dsl/examples/last-commits.ts
Normal file
@ -0,0 +1,61 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure function API
|
||||
*/
|
||||
async function run1 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// last 10 commits for 'elasticsearch-js' repo
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.term('repository', 'elasticsearch-js'),
|
||||
Q.sort('committed_date', { order: 'desc' }),
|
||||
Q.size(10)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
async function run2 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// last 10 commits for 'elasticsearch-js' repo
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: F()
|
||||
.term('repository', 'elasticsearch-js')
|
||||
.sort('committed_date', { order: 'desc' })
|
||||
.size(10)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run1().catch(console.log)
|
||||
run2().catch(console.log)
|
||||
159
dsl/examples/loadRepo.js
Normal file
159
dsl/examples/loadRepo.js
Normal file
@ -0,0 +1,159 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const minimist = require('minimist')
|
||||
const Git = require('simple-git/promise')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
start(minimist(process.argv.slice(2), {
|
||||
string: ['elasticsearch', 'index', 'repository'],
|
||||
default: {
|
||||
elasticsearch: 'http://localhost:9200',
|
||||
index: 'git',
|
||||
repository: 'elasticsearch-js'
|
||||
}
|
||||
}))
|
||||
|
||||
async function start ({ elasticsearch, index, repository }) {
|
||||
const client = new Client({ node: elasticsearch })
|
||||
await createIndex({ client, index })
|
||||
await loadHistory({ client, index, repository })
|
||||
}
|
||||
|
||||
async function createIndex ({ client, index }) {
|
||||
const userMapping = {
|
||||
properties: {
|
||||
name: {
|
||||
type: 'text',
|
||||
fields: {
|
||||
keyword: { type: 'keyword' }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await client.indices.create({
|
||||
index,
|
||||
body: {
|
||||
settings: {
|
||||
// just one shard, no replicas for testing
|
||||
number_of_shards: 1,
|
||||
number_of_replicas: 0,
|
||||
// custom analyzer for analyzing file paths
|
||||
analysis: {
|
||||
analyzer: {
|
||||
file_path: {
|
||||
type: 'custom',
|
||||
tokenizer: 'path_hierarchy',
|
||||
filter: ['lowercase']
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
mappings: {
|
||||
properties: {
|
||||
repository: { type: 'keyword' },
|
||||
sha: { type: 'keyword' },
|
||||
author: userMapping,
|
||||
authored_date: { type: 'date' },
|
||||
committer: userMapping,
|
||||
committed_date: { type: 'date' },
|
||||
parent_shas: { type: 'keyword' },
|
||||
description: { type: 'text', analyzer: 'snowball' },
|
||||
files: { type: 'text', analyzer: 'file_path', fielddata: true }
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function loadHistory ({ client, index, repository }) {
|
||||
const git = Git(process.cwd())
|
||||
// Get the result of 'git log'
|
||||
const { all: history } = await git.log({
|
||||
format: {
|
||||
hash: '%H',
|
||||
parentHashes: '%P',
|
||||
authorName: '%an',
|
||||
authorEmail: '%ae',
|
||||
authorDate: '%ai',
|
||||
committerName: '%cn',
|
||||
committerEmail: '%ce',
|
||||
committerDate: '%cd',
|
||||
subject: '%s'
|
||||
}
|
||||
})
|
||||
|
||||
// Get the stats for every commit
|
||||
for (var i = 0; i < history.length; i++) {
|
||||
const commit = history[i]
|
||||
const stat = await git.show(['--numstat', '--oneline', commit.hash])
|
||||
commit.files = []
|
||||
commit.stat = stat
|
||||
.split('\n')
|
||||
.slice(1)
|
||||
.filter(Boolean)
|
||||
.reduce((acc, val, index) => {
|
||||
const [insertions, deletions, file] = val.split('\t')
|
||||
commit.files.push(file)
|
||||
acc.files++
|
||||
acc.insertions += Number(insertions)
|
||||
acc.deletions += Number(deletions)
|
||||
return acc
|
||||
}, { insertions: 0, deletions: 0, files: 0 })
|
||||
}
|
||||
|
||||
// Index the data, 500 commits at a time
|
||||
var count = 0
|
||||
var chunk = history.slice(count, count + 500)
|
||||
while (chunk.length > 0) {
|
||||
const { body } = await client.bulk({
|
||||
body: chunk.reduce((body, commit) => {
|
||||
body.push({ index: { _index: index, _id: commit.hash } })
|
||||
body.push({
|
||||
repository,
|
||||
sha: commit.hash,
|
||||
author: {
|
||||
name: commit.authorName,
|
||||
email: commit.authorEmail
|
||||
},
|
||||
authored_date: new Date(commit.authorDate).toISOString(),
|
||||
committer: {
|
||||
name: commit.committerName,
|
||||
email: commit.committerEmail
|
||||
},
|
||||
committed_date: new Date(commit.committerDate).toISOString(),
|
||||
parent_shas: commit.parentHashes,
|
||||
description: commit.subject,
|
||||
files: commit.files,
|
||||
stat: commit.stat
|
||||
})
|
||||
return body
|
||||
}, [])
|
||||
})
|
||||
if (body.errors) {
|
||||
console.log(JSON.stringify(body.items[0], null, 2))
|
||||
process.exit(1)
|
||||
}
|
||||
count += 500
|
||||
chunk = history.slice(count, count + 500)
|
||||
}
|
||||
}
|
||||
77
dsl/examples/top-committers.ts
Normal file
77
dsl/examples/top-committers.ts
Normal file
@ -0,0 +1,77 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure function API
|
||||
*/
|
||||
async function run1 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// top committers aggregation
|
||||
// 'committers' is the name of the aggregation
|
||||
const committersAgg = A.committers.terms(
|
||||
{ field: 'committer.name.keyword' },
|
||||
// you can nest multiple aggregations by
|
||||
// passing them to the aggregation constructor
|
||||
// 'line_stats' is the name of the aggregation
|
||||
A.line_stats.stats({ field: 'stat.insertions' })
|
||||
)
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.matchAll(),
|
||||
Q.size(0),
|
||||
A(committersAgg)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
async function run2 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// top committers aggregation
|
||||
// 'committers' is the name of the aggregation
|
||||
const committersAgg = A.committers.terms(
|
||||
{ field: 'committer.name.keyword' },
|
||||
// you can nest multiple aggregations by
|
||||
// passing them to the aggregation constructor
|
||||
// 'line_stats' is the name of the aggregation
|
||||
A.line_stats.stats({ field: 'stat.insertions' })
|
||||
)
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: F()
|
||||
.matchAll()
|
||||
.size(0)
|
||||
.aggs(committersAgg)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
run1().catch(console.log)
|
||||
run2().catch(console.log)
|
||||
105
dsl/examples/top-month.ts
Normal file
105
dsl/examples/top-month.ts
Normal file
@ -0,0 +1,105 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A, F } from '../'
|
||||
|
||||
/**
|
||||
* Pure function API
|
||||
*/
|
||||
async function run1 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const committers = A.committers.terms(
|
||||
{ field: 'committer.name.keyword' },
|
||||
A.insertions.sum({ field: 'stat.insertions' })
|
||||
)
|
||||
const topCommittersPerMonth = A.top_committer_per_month.maxBucket(
|
||||
{ bucket_path: 'committers>insertions' }
|
||||
)
|
||||
const commitsPerMonth = A.commits_per_month.dateHistogram(
|
||||
{
|
||||
field: 'committed_date',
|
||||
interval: 'day',
|
||||
min_doc_count: 1,
|
||||
order: { _count: 'desc' }
|
||||
},
|
||||
// nested aggregations
|
||||
committers,
|
||||
topCommittersPerMonth
|
||||
)
|
||||
const topCommittersPerMonthGlobal = A.top_committer_per_month.maxBucket(
|
||||
{ bucket_path: 'commits_per_month>top_committer_per_month' }
|
||||
)
|
||||
|
||||
const { body: topMonths } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
// we want to know the top month for 'delvedor'
|
||||
Q.filter(Q.term('author', 'delvedor')),
|
||||
Q.size(0),
|
||||
A(commitsPerMonth, topCommittersPerMonthGlobal)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(topMonths)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API
|
||||
*/
|
||||
async function run2 () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const committers = A.committers.terms(
|
||||
{ field: 'committer.name.keyword' },
|
||||
A.insertions.sum({ field: 'stat.insertions' })
|
||||
)
|
||||
const topCommittersPerMonth = A.top_committer_per_month.maxBucket(
|
||||
{ bucket_path: 'committers>insertions' }
|
||||
)
|
||||
const commitsPerMonth = A.commits_per_month.dateHistogram(
|
||||
{
|
||||
field: 'committed_date',
|
||||
interval: 'day',
|
||||
min_doc_count: 1,
|
||||
order: { _count: 'desc' }
|
||||
},
|
||||
// nested aggregations
|
||||
committers,
|
||||
topCommittersPerMonth
|
||||
)
|
||||
const topCommittersPerMonthGlobal = A.top_committer_per_month.maxBucket(
|
||||
{ bucket_path: 'commits_per_month>top_committer_per_month' }
|
||||
)
|
||||
|
||||
const { body: topMonths } = await client.search({
|
||||
index: 'git',
|
||||
body: F()
|
||||
// we want to know the top month for 'delvedor'
|
||||
.filter(F().term('author', 'delvedor'))
|
||||
.size(0)
|
||||
.aggs(commitsPerMonth, topCommittersPerMonthGlobal)
|
||||
})
|
||||
|
||||
console.log(topMonths)
|
||||
}
|
||||
|
||||
run1().catch(console.log)
|
||||
run2().catch(console.log)
|
||||
24
dsl/index.d.ts
vendored
Normal file
24
dsl/index.d.ts
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import Q from './lib/query'
|
||||
import A from './lib/aggregation'
|
||||
import F from './lib/fluent'
|
||||
|
||||
export { Q, A, F }
|
||||
26
dsl/index.js
Normal file
26
dsl/index.js
Normal file
@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'

// CommonJS entry point for the DSL: re-export the compiled builders —
// Q (query), A (aggregation) and F (fluent wrapper).
const { default: Q } = require('./lib/query')
const { default: A } = require('./lib/aggregation')
const { default: F } = require('./lib/fluent')

module.exports = { Q, A, F }
|
||||
383
dsl/src/aggregation.ts
Normal file
383
dsl/src/aggregation.ts
Normal file
@ -0,0 +1,383 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-undef: 0 */
|
||||
/* eslint no-use-before-define: 0 */
|
||||
/* eslint no-redeclare: 0 */
|
||||
|
||||
import * as t from './types'
|
||||
|
||||
type aggsOptions = Record<string, any> | string
|
||||
|
||||
function _A (...aggregations: any[]): any {
|
||||
return {
|
||||
// @ts-ignore
|
||||
aggs: Object.assign.apply(null, aggregations)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Shape of the `A` proxy: callable as `A(...aggs)` to merge built
 * aggregations into an `{ aggs }` body fragment, and indexable as
 * `A.<name>.<type>(opts, ...nested)` to build a named aggregation.
 * `opts` may be a plain options object or — for most methods — a
 * string shorthand for the method's primary option (usually `field`).
 */
interface Aggregations {
  (...aggregations: any[]): any
  [name: string]: {
    // add aggregations to a parent aggregation
    aggs(...aggregations: any[]): t.Aggregation
    // Metric aggregations
    avg(opts: aggsOptions): t.Aggregation
    weightedAvg(opts: aggsOptions): t.Aggregation
    cardinality(opts: aggsOptions): t.Aggregation
    extendedStats(opts: aggsOptions): t.Aggregation
    geoBounds(opts: aggsOptions): t.Aggregation
    geoCentroid(opts: aggsOptions): t.Aggregation
    max(opts: aggsOptions): t.Aggregation
    min(opts: aggsOptions): t.Aggregation
    percentiles(opts: aggsOptions): t.Aggregation
    percentileRanks(opts: aggsOptions): t.Aggregation
    scriptedMetric(opts: aggsOptions): t.Aggregation
    stats(opts: aggsOptions): t.Aggregation
    sum(opts: aggsOptions): t.Aggregation
    topHits(opts: aggsOptions): t.Aggregation
    valueCount(opts: aggsOptions): t.Aggregation
    medianAbsoluteDeviation(opts: aggsOptions): t.Aggregation
    // Buckets aggregations (accept trailing nested aggregations)
    adjacencyMatrix(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    autoDateHistogram(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    children(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    composite(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    dateHistogram(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    dateRange(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    diversifiedSampler(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    filter(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    filters(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    geoDistance(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    geohashGrid(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    geotileGrid(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    global(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    histogram(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    ipRange(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    missing(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    nested(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    parent(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    range(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    reverseNested(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    sampler(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    significantTerms(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    significantText(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    terms(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    // Pipeline aggregations (string shorthand fills `buckets_path`)
    avgBucket (opts: aggsOptions): t.Aggregation
    derivative (opts: aggsOptions): t.Aggregation
    maxBucket (opts: aggsOptions): t.Aggregation
    minBucket (opts: aggsOptions): t.Aggregation
    sumBucket (opts: aggsOptions): t.Aggregation
    statsBucket (opts: aggsOptions): t.Aggregation
    extendedStatsBucket (opts: aggsOptions): t.Aggregation
    percentilesBucket (opts: aggsOptions): t.Aggregation
    movingAvg (opts: aggsOptions): t.Aggregation
    movingFn (opts: aggsOptions): t.Aggregation
    cumulativeSum (opts: aggsOptions): t.Aggregation
    bucketScript (opts: aggsOptions): t.Aggregation
    bucketSelector (opts: aggsOptions): t.Aggregation
    bucketSort (opts: aggsOptions): t.Aggregation
    serialDiff (opts: aggsOptions): t.Aggregation
    // Matrix aggregations
    matrixStats (opts: aggsOptions): t.Aggregation
  }
}
|
||||
|
||||
const aggregations = {
|
||||
get (target: unknown, name: string) {
|
||||
return {
|
||||
// add aggregations to a parent aggregation
|
||||
aggs (...aggregations: any[]): t.Aggregation {
|
||||
return updateAggsObject(name, aggregations)
|
||||
},
|
||||
|
||||
// Metric aggregations
|
||||
avg (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('avg', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
weightedAvg (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('weighted_avg', name, null, opts)
|
||||
},
|
||||
|
||||
cardinality (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('cardinality', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
extendedStats (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('extended_stats', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
geoBounds (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('geo_bounds', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
geoCentroid (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('geo_centroid', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
max (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('max', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
min (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('min', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
percentiles (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('percentiles', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
percentileRanks (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('percentile_ranks', name, null, opts)
|
||||
},
|
||||
|
||||
scriptedMetric (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('scripted_metric', name, null, opts)
|
||||
},
|
||||
|
||||
stats (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('stats', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
sum (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('sum', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
topHits (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('top_hits', name, null, opts)
|
||||
},
|
||||
|
||||
valueCount (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('value_count', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
medianAbsoluteDeviation (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('median_absolute_deviation', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
// Buckets aggregations
|
||||
adjacencyMatrix (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('adjacency_matrix', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
autoDateHistogram (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('auto_date_histogram', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
children (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('children', name, isString(opts) ? 'type' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
composite (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('composite', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
dateHistogram (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('date_histogram', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
dateRange (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('date_range', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
diversifiedSampler (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('diversified_sampler', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
filter (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('filter', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
filters (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('filters', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
geoDistance (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('geo_distance', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
geohashGrid (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('geohash_grid', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
geotileGrid (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('geotile_grid', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
global (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('global', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
histogram (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('histogram', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
ipRange (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('ip_range', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
missing (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('missing', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
nested (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('nested', name, isString(opts) ? 'path' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
parent (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('parent', name, isString(opts) ? 'type' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
range (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('range', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
reverseNested (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('reverse_nested', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
sampler (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('sampler', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
significantTerms (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('significant_terms', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
significantText (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('significant_text', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
terms (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('terms', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
// Pipeline aggregations
|
||||
avgBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('avg_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
derivative (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('derivative', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
maxBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('max_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
minBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('min_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
sumBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('sum_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
statsBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('stats_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
extendedStatsBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('extended_stats_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
percentilesBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('percentiles_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
movingAvg (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('moving_avg', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
movingFn (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('moving_fn', name, null, opts)
|
||||
},
|
||||
|
||||
cumulativeSum (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('cumulative_sum', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
bucketScript (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('bucket_script', name, null, opts)
|
||||
},
|
||||
|
||||
bucketSelector (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('bucket_selector', name, null, opts)
|
||||
},
|
||||
|
||||
bucketSort (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('bucket_sort', name, null, opts)
|
||||
},
|
||||
|
||||
serialDiff (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('serial_diff', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
// Matrix aggregations
|
||||
matrixStats (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('matrix_stats', name, isString(opts) ? 'fields' : null, opts)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const A = new Proxy(_A, aggregations) as Aggregations
|
||||
|
||||
function generateAggsObject (type: string, name: string, defaultField: string | null, opts: any = {}, aggregations: any[] = []): t.Aggregation {
|
||||
if (typeof opts === 'string' && typeof defaultField === 'string') {
|
||||
opts = { [defaultField]: opts }
|
||||
} else if (typeof opts === 'string' && defaultField === null) {
|
||||
throw new Error('This method does not support shorthand options')
|
||||
}
|
||||
|
||||
if (aggregations.length > 0) {
|
||||
return {
|
||||
[name]: {
|
||||
[type]: opts,
|
||||
// @ts-ignore
|
||||
aggs: Object.assign.apply(null, aggregations)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
[name]: {
|
||||
[type]: opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function updateAggsObject (name: string, aggregations: any[]): t.Aggregation {
|
||||
const [main, ...others] = aggregations.filter(falsy)
|
||||
main[name].aggs = Object.assign(main[name].aggs || {}, ...others)
|
||||
return main
|
||||
}
|
||||
|
||||
// Predicate used with Array#filter to drop null/undefined entries from
// aggregation lists.
// NOTE(review): the name is misleading — it returns true for TRUTHY
// values (i.e. it KEEPS them when filtering); consider renaming to
// `truthy` together with its call sites in a follow-up.
function falsy (val: any): boolean {
  return !!val
}
|
||||
|
||||
function isString (val: any): val is string {
|
||||
return typeof val === 'string'
|
||||
}
|
||||
|
||||
export default A
|
||||
345
dsl/src/fluent.ts
Normal file
345
dsl/src/fluent.ts
Normal file
@ -0,0 +1,345 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-undef: 0 */
|
||||
/* eslint no-use-before-define: 0 */
|
||||
/* eslint no-redeclare: 0 */
|
||||
/* eslint no-dupe-class-members: 0 */
|
||||
/* eslint lines-between-class-members: 0 */
|
||||
|
||||
import Q from './query'
|
||||
import A from './aggregation'
|
||||
import * as t from './types'
|
||||
import T from '../es-types'
|
||||
|
||||
type SearchRequest = Required<T.SearchRequest>['body']
|
||||
interface BoolQuery {
|
||||
filter?: T.QueryContainer[]
|
||||
minimum_should_match?: T.MinimumShouldMatch
|
||||
must?: T.QueryContainer[]
|
||||
must_not?: T.QueryContainer[]
|
||||
should?: T.QueryContainer[]
|
||||
_name?: string
|
||||
}
|
||||
|
||||
const kState = Symbol('dsl-query-state')
|
||||
type MultiType = string | number | boolean
|
||||
|
||||
// TODO: the client should detect a fluent query
|
||||
// and automatically call `query.build()`
|
||||
|
||||
class FluentQ {
|
||||
[kState]: (SearchRequest | T.QueryContainer | T.QueryContainer[] | BoolQuery)[]
|
||||
constructor () {
|
||||
this[kState] = []
|
||||
}
|
||||
|
||||
build (): SearchRequest {
|
||||
return Q(...this[kState])
|
||||
}
|
||||
|
||||
buildQuery (): T.QueryContainer {
|
||||
const b = Q(...this[kState])
|
||||
return b.query != null ? b.query : {}
|
||||
}
|
||||
|
||||
param (key: string): Symbol {
|
||||
return Q.param(key)
|
||||
}
|
||||
|
||||
compileUnsafe<TInput extends Record<string, any> = Record<string, any>> (): t.compiledFunction<TInput> {
|
||||
return Q.compileUnsafe(this.build())
|
||||
}
|
||||
|
||||
compile<TInput extends Record<string, any> = Record<string, any>> (): t.compiledFunction<TInput> {
|
||||
return Q.compile(this.build())
|
||||
}
|
||||
|
||||
match (key: string, val: MultiType | Symbol): this
|
||||
match (key: string, val: MultiType | Symbol, opts: T.MatchQuery): this
|
||||
match (key: string, val: (MultiType | Symbol)[]): this
|
||||
match (key: string, val: (MultiType | Symbol)[], opts: T.MatchQuery): this
|
||||
match (key: string, val: any, opts?: any): this {
|
||||
this[kState].push(Q.match(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
matchPhrase (key: string, val: string | Symbol): this
|
||||
matchPhrase (key: string, val: string | Symbol, opts: T.MatchPhraseQuery): this
|
||||
matchPhrase (key: string, val: (string | Symbol)[]): this
|
||||
matchPhrase (key: string, val: (string | Symbol)[], opts: T.MatchPhraseQuery): this
|
||||
matchPhrase (key: string, val: any, opts?: any): this {
|
||||
this[kState].push(Q.matchPhrase(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
matchPhrasePrefix (key: string, val: string | Symbol): this
|
||||
matchPhrasePrefix (key: string, val: string | Symbol, opts: T.MatchPhrasePrefixQuery): this
|
||||
matchPhrasePrefix (key: string, val: (string | Symbol)[]): this
|
||||
matchPhrasePrefix (key: string, val: (string | Symbol)[], opts: T.MatchPhrasePrefixQuery): this
|
||||
matchPhrasePrefix (key: string, val: any, opts?: any): this {
|
||||
this[kState].push(Q.matchPhrasePrefix(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
multiMatch (keys: string[], val: string | Symbol, opts?: T.MultiMatchQuery): this {
|
||||
this[kState].push(Q.multiMatch(keys, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
matchAll (opts?: T.MatchAllQuery): this {
|
||||
this[kState].push(Q.matchAll(opts))
|
||||
return this
|
||||
}
|
||||
|
||||
matchNone (): this {
|
||||
this[kState].push(Q.matchNone())
|
||||
return this
|
||||
}
|
||||
|
||||
common (key: string, val: string | Symbol): this
|
||||
common (key: string, val: string | Symbol, opts: T.CommonTermsQuery): this
|
||||
common (key: string, val: (string | Symbol)[]): this
|
||||
common (key: string, val: (string | Symbol)[], opts: T.CommonTermsQuery): this
|
||||
common (key: string, val: any, opts?: any): this {
|
||||
this[kState].push(Q.common(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
queryString (val: string | Symbol, opts: T.QueryStringQuery): this {
|
||||
this[kState].push(Q.queryString(val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
simpleQueryString (val: string | Symbol, opts: T.SimpleQueryStringQuery): this {
|
||||
this[kState].push(Q.simpleQueryString(val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
term (key: string, val: MultiType | Symbol): this
|
||||
term (key: string, val: MultiType | Symbol, opts: T.TermQuery): this
|
||||
term (key: string, val: (MultiType | Symbol)[]): this
|
||||
term (key: string, val: (MultiType | Symbol)[], opts: T.TermsQuery): this
|
||||
term (key: string, val: any, opts?: any): this {
|
||||
if (Array.isArray(val) && opts == null) {
|
||||
return this.terms(key, val)
|
||||
}
|
||||
this[kState].push(Q.term(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
terms (key: string, val: (MultiType | Symbol)[]): this
|
||||
terms (key: string, val: (MultiType | Symbol)[], opts: T.TermsQuery): this
|
||||
terms (key: string, val: (MultiType | Symbol)[], opts?: any): this {
|
||||
this[kState].push(Q.terms(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
termsSet (key: string, val: (string | Symbol)[], opts?: T.TermsSetQuery): this {
|
||||
this[kState].push(Q.termsSet(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
range (key: string, opts: T.RangeQuery): this {
|
||||
this[kState].push(Q.range(key, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
exists (key: string | Symbol): this
|
||||
exists (key: (string | Symbol)[]): this
|
||||
exists (key: any): this {
|
||||
if (Array.isArray(key)) {
|
||||
for (const k of key) {
|
||||
this[kState].push(Q.exists(k))
|
||||
}
|
||||
return this
|
||||
}
|
||||
this[kState].push(Q.exists(key))
|
||||
return this
|
||||
}
|
||||
|
||||
prefix (key: string, val: string | Symbol): this
|
||||
prefix (key: string, val: string | Symbol, opts: T.PrefixQuery): this
|
||||
prefix (key: string, val: (string | Symbol)[]): this
|
||||
prefix (key: string, val: (string | Symbol)[], opts: T.PrefixQuery): this
|
||||
prefix (key: string, val: any, opts?: any): this {
|
||||
this[kState].push(Q.prefix(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
wildcard (key: string, val: string | Symbol): this
|
||||
wildcard (key: string, val: string | Symbol, opts: T.WildcardQuery): this
|
||||
wildcard (key: string, val: (string | Symbol)[]): this
|
||||
wildcard (key: string, val: (string | Symbol)[], opts: T.WildcardQuery): this
|
||||
wildcard (key: string, val: any, opts?: any): any {
|
||||
this[kState].push(Q.wildcard(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
regexp (key: string, val: string | Symbol): this
|
||||
regexp (key: string, val: string | Symbol, opts: T.RegexpQuery): this
|
||||
regexp (key: string, val: (string | Symbol)[]): this
|
||||
regexp (key: string, val: (string | Symbol)[], opts: T.RegexpQuery): this
|
||||
regexp (key: string, val: any, opts?: any): this {
|
||||
this[kState].push(Q.regexp(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
fuzzy (key: string, val: string | Symbol): this
|
||||
fuzzy (key: string, val: string | Symbol, opts: T.FuzzyQuery): this
|
||||
fuzzy (key: string, val: (string | Symbol)[]): this
|
||||
fuzzy (key: string, val: (string | Symbol)[], opts: T.FuzzyQuery): this
|
||||
fuzzy (key: string, val: any, opts?: any): this {
|
||||
this[kState].push(Q.fuzzy(key, val, opts))
|
||||
return this
|
||||
}
|
||||
|
||||
ids (key: string, val: (string | Symbol)[]): this {
|
||||
this[kState].push(Q.ids(key, val))
|
||||
return this
|
||||
}
|
||||
|
||||
must (...queries: FluentQ[]): this {
  // Merge the accumulated clauses of every sub-builder into one `must`.
  // @ts-expect-error
  const states = queries.flatMap(q => q[kState])
  this[kState].push(Q.must(...states))
  return this
}
|
||||
|
||||
should (...queries: FluentQ[]): this {
  // Merge the accumulated clauses of every sub-builder into one `should`.
  // @ts-expect-error
  const states = queries.flatMap(q => q[kState])
  this[kState].push(Q.should(...states))
  return this
}
|
||||
|
||||
mustNot (...queries: FluentQ[]): this {
  // Merge the accumulated clauses of every sub-builder into one `must_not`.
  // @ts-expect-error
  const states = queries.flatMap(q => q[kState])
  this[kState].push(Q.mustNot(...states))
  return this
}
|
||||
|
||||
filter (...queries: FluentQ[]): this {
  // Merge the accumulated clauses of every sub-builder into one `filter`.
  // @ts-expect-error
  const states = queries.flatMap(q => q[kState])
  this[kState].push(Q.filter(...states))
  return this
}
|
||||
|
||||
bool (...queries: FluentQ[]): this {
  // Collapse the given builders' clauses into a single bool query.
  // @ts-expect-error
  const states = queries.flatMap(q => q[kState])
  this[kState].push(Q.bool(...states))
  return this
}
|
||||
|
||||
and (...blocks: FluentQ[]): this {
  // AND-combine the current query with the given builders, preserving
  // the non-query parts of the request (size, sort, ...).
  const { query = {}, ...searchRequest } = this.build()
  const subQueries = blocks.map(b => b.buildQuery())
  this[kState] = [searchRequest, Q.and(query, ...subQueries)]
  return this
}
|
||||
|
||||
or (...blocks: FluentQ[]): this {
  // OR-combine the current query with the given builders, preserving
  // the non-query parts of the request (size, sort, ...).
  // FIX: this previously delegated to Q.and (copy/paste from `and` above),
  // which produced AND semantics from a method named `or`.
  const { query = {}, ...searchRequest } = this.build()
  this[kState] = [searchRequest, Q.or(query, ...blocks.map(q => q.buildQuery()))]
  return this
}
|
||||
|
||||
not (query: FluentQ): this {
  // Negate the given sub-builder's query and append it; chainable.
  const negated = Q.not(query.buildQuery())
  this[kState].push(negated)
  return this
}
|
||||
|
||||
minShouldMatch (int: number): this {
  // Append a standalone `minimum_should_match` option; chainable.
  const clause = Q.minShouldMatch(int)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
name (queryName: string): this {
  // Append a `_name` option for the enclosing bool query; chainable.
  const clause = Q.name(queryName)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
nested (path: string, query: T.QueryContainer, opts: T.NestedQuery): this {
  // Append a `nested` query for the given path; chainable.
  const clause = Q.nested(path, query, opts)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
constantScore (query: T.QueryContainer, boost: number): this {
  // Append a `constant_score` wrapper around the given query; chainable.
  const clause = Q.constantScore(query, boost)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
disMax (queries: T.QueryContainer[], opts?: T.DisMaxQuery): this {
  // Append a `dis_max` query over the given sub-queries; chainable.
  const clause = Q.disMax(queries, opts)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
functionScore (function_score: T.FunctionScoreQuery): this {
  // Append a `function_score` query; chainable.
  const clause = Q.functionScore(function_score)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
boosting (boostOpts: T.BoostingQuery): this {
  // Append a `boosting` query; chainable.
  const clause = Q.boosting(boostOpts)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
sort (key: string | string[]): this
sort (key: string | string[], order: T.SortOrder): this
sort (key: string | string[], opts: T.Sort): this
sort (key: string | string[], opts?: any): this {
  // Append sort configuration for one or more fields; chainable.
  const clause = Q.sort(key, opts)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
script (source: string): this
script (source: string, lang: string): this
script (source: string, params: Record<string, any>, lang?: string): this
script (source: string, params?: any, lang?: any): this {
  // Append a script block; the argument juggling lives in Q.script.
  const clause = Q.script(source, params, lang)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
size (s: number | Symbol): this {
  // Append a top-level `size` option; chainable.
  const clause = Q.size(s)
  this[kState].push(clause)
  return this
}
|
||||
|
||||
aggs (...aggregations: Record<string, any>[]): this {
  // Append an aggregations block built from the given definitions; chainable.
  const aggsBlock = A(...aggregations)
  this[kState].push(aggsBlock)
  return this
}
|
||||
|
||||
raw (obj: Record<string, any>): this {
  // Escape hatch: push an arbitrary object into the query state as-is.
  this[kState].push(obj)
  return this
}
|
||||
|
||||
clone (): FluentQ {
  // Return an independent builder with a copied state list.
  // (The clause objects themselves are shared, not deep-copied.)
  const copy = new FluentQ()
  copy[kState] = [...this[kState]]
  return copy
}
|
||||
|
||||
toJSON () {
  // JSON.stringify on the builder yields the built query object.
  return this.buildQuery()
}
|
||||
}
|
||||
|
||||
export default function build () {
|
||||
return new FluentQ()
|
||||
}
|
||||
985
dsl/src/query.ts
Normal file
985
dsl/src/query.ts
Normal file
@ -0,0 +1,985 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-undef: 0 */
|
||||
/* eslint no-use-before-define: 0 */
|
||||
/* eslint no-redeclare: 0 */
|
||||
/* eslint no-inner-declarations: 0 */
|
||||
|
||||
import * as t from './types'
|
||||
import T from '../es-types'
|
||||
|
||||
// Options accepted inside a `bool` query; every clause list is optional.
interface BoolQuery {
  filter?: T.QueryContainer[]
  minimum_should_match?: T.MinimumShouldMatch
  must?: T.QueryContainer[]
  must_not?: T.QueryContainer[]
  should?: T.QueryContainer[]
  _name?: string
}
// The body of a search request (query plus top-level options like size/sort).
type SearchRequest = Required<T.SearchRequest>['body']
// A bool query wrapped in its `bool` envelope.
type BoolBlock = { bool: BoolQuery }
// A block that already carries a top-level `query` property.
type QueryBlock = { query: T.QueryContainer }
// Scalar value types accepted by match/term style clauses.
type MultiType = string | number | boolean
||||
|
||||
// Assembles a search request body from a mix of query clauses and
// top-level options. Blocks whose first key is a recognized top-level
// option (size, sort, aggs, ...) are copied onto the body; everything
// else is treated as a query clause. A single non-bool clause becomes
// the query directly; multiple clauses are merged through Q.bool.
function Q (...blocks: (SearchRequest | T.QueryContainer | T.QueryContainer[] | BoolQuery)[]): SearchRequest {
  blocks = blocks.flat()
  // Keys that belong to the request body, not to the query itself.
  const topLevelKeys = [
    'aggs',
    'collapse',
    'explain',
    'from',
    'highlight',
    'indices_boost',
    'min_score',
    'post_filter',
    'profile',
    'rescore',
    'script_fields',
    'search_after',
    'size',
    'slice',
    'sort',
    '_source',
    'suggest',
    'terminate_after',
    'timeout',
    'track_scores',
    'version'
  ]

  // Each block is classified by its first key only.
  // @ts-expect-error
  const queries: (T.QueryContainer | BoolQuery)[] = blocks.filter(block => !topLevelKeys.includes(Object.keys(block)[0]))

  let body: SearchRequest
  if (queries.length === 1 && !isBoolQuery(queries[0])) {
    // A lone clause is used as-is; if it already has a `query` envelope
    // it is taken verbatim, otherwise it is wrapped.
    if (isQuery(queries[0])) {
      body = queries[0]
    } else {
      body = { query: queries[0] }
    }
  } else {
    // Multiple clauses (or bool options) are merged into one bool query.
    if (queries.length > 0) {
      body = { query: Q.bool(...queries) }
    } else {
      body = {}
    }
  }
  // Copy top-level options onto the body; `sort` entries accumulate
  // instead of overwriting each other.
  for (const block of blocks) {
    const key = Object.keys(block)[0]
    if (topLevelKeys.includes(key)) {
      if (key === 'sort') {
        body.sort = body.sort || []
        // @ts-expect-error
        body.sort.push.apply(body.sort, block[key])
      } else {
        // @ts-expect-error
        body[key] = block[key]
      }
    }
  }

  return body
}
|
||||
|
||||
// Function.prototype.name is read-only by default; make it writable so
// the `namespace Q` merge below can attach a `name` helper to Q.
Object.defineProperty(Q, 'name', { writable: true })
|
||||
|
||||
namespace Q {
|
||||
export function param (key: string): Symbol {
|
||||
return Symbol(key)
|
||||
}
|
||||
|
||||
// Compiles a query containing Q.param placeholders into a function that
// substitutes inputs at call time via generated code (`new Function`).
// Placeholders survive JSON.stringify as "###key###" markers, which are
// then rewritten into `input[...]` accesses in the generated source.
// NOTE(review): `new Function` executes generated code — only safe if the
// query object comes from trusted code, hence the "Unsafe" name.
export function compileUnsafe<TInput extends Record<string, any> = Record<string, any>> (query: Record<string, any>): t.compiledFunction<TInput> {
  let stringified = JSON.stringify(query, (key, value) => {
    if (typeof value === 'symbol') {
      // Encode the placeholder so it can be found again after stringify.
      return `###${value.description!}###`
    } else if (key === '__proto__') {
      // Drop prototype-pollution vectors from the serialized query.
      return undefined
    } else if (key === 'constructor' && typeof value === 'object' &&
      value !== null && value.prototype !== undefined) {
      return undefined
    } else {
      return value
    }
  })

  const keys: string[] = []
  // Placeholder descriptions are \w+ only, so the markers are regex-safe.
  const matches = stringified.match(/"###\w+###"/g)
  if (matches === null) {
    throw new Error('The query does not contain any use of `Q.params`')
  }
  for (const match of matches) {
    // Strip the quote + ### fence to recover the parameter key.
    const key = match.slice(4, -4)
    keys.push(key)
    stringified = stringified.replace(new RegExp(match), `input[${JSON.stringify(key)}]`)
  }
  // The generated function validates that every parameter key is present
  // before evaluating the query literal.
  const code = `
    if (input == null) {
      throw new Error('Input must not be empty')
    }
    const keys = ${JSON.stringify(keys)}
    for (const key of keys) {
      if (input[key] === undefined) {
        throw new Error('Missing key: ' + key)
      }
    }
    return ${stringified}
  `
  // @ts-ignore
  return new Function('input', code) // eslint-disable-line
}
|
||||
|
||||
// Compiles a query containing Q.param placeholders into a function that
// substitutes inputs at call time without code generation: placeholder
// paths are collected up front and patched via setParam on each call.
// NOTE(review): setParam is defined elsewhere in this module — presumably
// it returns a copy with the value set at `path`; confirm it does not
// mutate the original query, since `query` is shared across calls.
export function compile<TInput extends Record<string, any> = Record<string, any>> (query: Record<string, any>): t.compiledFunction<TInput> {
  const params: Array<{ path: string[], key: string }> = []
  traverse(query, [])

  if (params.length === 0) {
    throw new Error('The query does not contain any use of `Q.params`')
  }

  return function (input: TInput): Record<string, any> {
    let q = query
    for (const param of params) {
      q = setParam(q, param.path, input[param.key])
    }
    return q
  }

  // Depth-first walk recording the object path of every symbol placeholder.
  function traverse (obj: Record<string, any>, path: string[]) {
    for (const key in obj) {
      const value = obj[key]
      if (typeof value === 'symbol') {
        params.push({ path: path.concat(key), key: value.description! })
      } else if (Array.isArray(value)) {
        // Array indices become string path segments.
        for (var i = 0; i < value.length; i++) {
          traverse(value[i], path.concat(key, '' + i))
        }
      } else if (typeof value === 'object' && value !== null) {
        traverse(value, path.concat(key))
      } else {
        // do nothing
      }
    }
  }
}
|
||||
|
||||
// Compiles a query containing Q.param placeholders into a function that
// rebuilds the query on every call by JSON-parsing a cached string, then
// patching the placeholder paths in place via setParam2. Each call
// therefore returns a fresh, independent object.
export function compileJson<TInput extends Record<string, any> = Record<string, any>> (query: Record<string, any>): t.compiledFunction<TInput> {
  const params: Array<{ path: string[], key: string }> = []
  traverse(query, [])

  if (params.length === 0) {
    throw new Error('The query does not contain any use of `Q.params`')
  }

  const stringified = JSON.stringify(query, (key, value) => {
    if (typeof value === 'symbol') {
      // Placeholder marker; overwritten by setParam2 at call time.
      return `###${value.description!}###`
    } else if (key === '__proto__') {
      // Drop prototype-pollution vectors from the serialized query.
      return undefined
    } else if (key === 'constructor' && typeof value === 'object' &&
      value !== null && value.prototype !== undefined) {
      return undefined
    } else {
      return value
    }
  })

  return function (input: TInput): Record<string, any> {
    const q = JSON.parse(stringified)
    for (const param of params) {
      // NOTE(review): setParam2 is defined elsewhere — it appears to
      // mutate `q` in place (return value unused); confirm.
      setParam2(q, param.path, input[param.key])
    }
    return q
  }

  // Depth-first walk recording the object path of every symbol placeholder.
  function traverse (obj: Record<string, any>, path: string[]) {
    for (const key in obj) {
      const value = obj[key]
      if (typeof value === 'symbol') {
        params.push({ path: path.concat(key), key: value.description! })
      } else if (Array.isArray(value)) {
        // Array indices become string path segments.
        for (var i = 0; i < value.length; i++) {
          traverse(value[i], path.concat(key, '' + i))
        }
      } else if (typeof value === 'object' && value !== null) {
        traverse(value, path.concat(key))
      } else {
        // do nothing
      }
    }
  }
}
|
||||
|
||||
export function match (key: string, val: MultiType | Symbol): { match: Record<string, MultiType> }
|
||||
export function match (key: string, val: MultiType | Symbol, opts: T.MatchQuery): { match: Record<string, T.MatchQuery> }
|
||||
export function match (key: string, val: (MultiType | Symbol)[]): { match: Record<string, MultiType> }[]
|
||||
export function match (key: string, val: (MultiType | Symbol)[], opts: T.MatchQuery): { match: Record<string, T.MatchQuery> }[]
|
||||
export function match (key: string, val: any, opts?: T.MatchQuery): any {
|
||||
return generateQueryObject('match', key, val, opts)
|
||||
}
|
||||
|
||||
export function matchPhrase (key: string, val: string | Symbol): { match_phrase: Record<string, string> }
|
||||
export function matchPhrase (key: string, val: string | Symbol, opts: T.MatchPhraseQuery): { match_phrase: Record<string, T.MatchPhraseQuery> }
|
||||
export function matchPhrase (key: string, val: (string | Symbol)[]): { match_phrase: Record<string, string> }[]
|
||||
export function matchPhrase (key: string, val: (string | Symbol)[], opts: T.MatchPhraseQuery): { match_phrase: Record<string, T.MatchPhraseQuery> }[]
|
||||
export function matchPhrase (key: string, val: any, opts?: T.MatchPhraseQuery): any {
|
||||
return generateQueryObject('match_phrase', key, val, opts)
|
||||
}
|
||||
|
||||
export function matchPhrasePrefix (key: string, val: string | Symbol): { match_phrase_prefix: Record<string, string> }
|
||||
export function matchPhrasePrefix (key: string, val: string | Symbol, opts: T.MatchPhrasePrefixQuery): { match_phrase_prefix: Record<string, T.MatchPhrasePrefixQuery> }
|
||||
export function matchPhrasePrefix (key: string, val: (string | Symbol)[]): { match_phrase_prefix: Record<string, string> }[]
|
||||
export function matchPhrasePrefix (key: string, val: (string | Symbol)[], opts: T.MatchPhrasePrefixQuery): { match_phrase_prefix: Record<string, T.MatchPhrasePrefixQuery> }[]
|
||||
export function matchPhrasePrefix (key: string, val: any, opts?: T.MatchPhrasePrefixQuery): any {
|
||||
return generateQueryObject('match_phrase_prefix', key, val, opts)
|
||||
}
|
||||
|
||||
export function multiMatch (keys: string[], val: string | Symbol, opts?: T.MultiMatchQuery): { multi_match: T.MultiMatchQuery } {
|
||||
return {
|
||||
multi_match: {
|
||||
// @ts-expect-error
|
||||
query: val,
|
||||
fields: keys,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function matchAll (opts?: T.MatchAllQuery): { match_all: T.MatchAllQuery } {
|
||||
return { match_all: { ...opts } }
|
||||
}
|
||||
|
||||
export function matchNone (): { match_none: {} } {
|
||||
return { match_none: {} }
|
||||
}
|
||||
|
||||
export function common (key: string, val: string | Symbol): { common: Record<string, string> }
|
||||
export function common (key: string, val: string | Symbol, opts: T.CommonTermsQuery): { common: Record<string, T.CommonTermsQuery> }
|
||||
export function common (key: string, val: (string | Symbol)[]): { common: Record<string, string> }[]
|
||||
export function common (key: string, val: (string | Symbol)[], opts: T.CommonTermsQuery): { common: Record<string, T.CommonTermsQuery> }[]
|
||||
export function common (key: string, val: any, opts?: T.CommonTermsQuery): any {
|
||||
return generateQueryObject('common', key, val, opts)
|
||||
}
|
||||
|
||||
export function queryString (val: string | Symbol, opts: T.QueryStringQuery): { query_string: T.QueryStringQuery } {
|
||||
return {
|
||||
query_string: {
|
||||
// @ts-expect-error
|
||||
query: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function simpleQueryString (val: string | Symbol, opts: T.SimpleQueryStringQuery): { simple_query_string: T.SimpleQueryStringQuery } {
|
||||
return {
|
||||
simple_query_string: {
|
||||
// @ts-expect-error
|
||||
query: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function term (key: string, val: MultiType | Symbol): { term: Record<string, MultiType> }
|
||||
export function term (key: string, val: MultiType | Symbol, opts: T.TermQuery): { term: Record<string, T.TermQuery> }
|
||||
export function term (key: string, val: (MultiType | Symbol)[]): { terms: Record<string, string[]> }
|
||||
export function term (key: string, val: (MultiType | Symbol)[], opts: T.TermsQuery): { term: Record<string, T.TermQuery> }[]
|
||||
export function term (key: string, val: any, opts?: any): any {
|
||||
if (Array.isArray(val) && opts == null) {
|
||||
return Q.terms(key, val)
|
||||
}
|
||||
return generateValueObject('term', key, val, opts)
|
||||
}
|
||||
|
||||
export function terms (key: string, val: (MultiType | Symbol)[]): { terms: Record<string, string[]> }
|
||||
export function terms (key: string, val: (MultiType | Symbol)[], opts: T.TermsQuery): { terms: Record<string, T.TermsQuery> }
|
||||
export function terms (key: string, val: (MultiType | Symbol)[], opts?: any): any {
|
||||
if (opts == null) {
|
||||
return {
|
||||
terms: { [key]: val }
|
||||
}
|
||||
}
|
||||
return { terms: opts }
|
||||
}
|
||||
|
||||
export function termsSet (key: string, val: (string | Symbol)[], opts?: T.TermsSetQuery): { terms_set: Record<string, T.TermsSetQuery> } {
|
||||
return {
|
||||
// @ts-ignore
|
||||
terms_set: {
|
||||
[key]: {
|
||||
terms: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function range (key: string, val: T.RangeQuery): { range: Record<string, T.RangeQuery> } {
|
||||
return { range: { [key]: val } }
|
||||
}
|
||||
|
||||
export function exists (key: string | Symbol): { exists: T.ExistsQuery }
|
||||
export function exists (key: (string | Symbol)[]): { exists: T.ExistsQuery }[]
|
||||
export function exists (key: any): any {
|
||||
if (Array.isArray(key)) {
|
||||
return key.map(k => exists(k))
|
||||
}
|
||||
return { exists: { field: key } }
|
||||
}
|
||||
|
||||
export function prefix (key: string, val: string | Symbol): { prefix: Record<string, string> }
|
||||
export function prefix (key: string, val: string | Symbol, opts: T.PrefixQuery): { prefix: Record<string, T.PrefixQuery> }
|
||||
export function prefix (key: string, val: (string | Symbol)[]): { prefix: Record<string, string> }[]
|
||||
export function prefix (key: string, val: (string | Symbol)[], opts: T.PrefixQuery): { prefix: Record<string, T.PrefixQuery> }
|
||||
export function prefix (key: string, val: any, opts?: any): any {
|
||||
return generateValueObject('prefix', key, val, opts)
|
||||
}
|
||||
|
||||
export function wildcard (key: string, val: string | Symbol): { wildcard: Record<string, string> }
|
||||
export function wildcard (key: string, val: string | Symbol, opts: T.WildcardQuery): { wildcard: Record<string, T.WildcardQuery> }
|
||||
export function wildcard (key: string, val: (string | Symbol)[]): { wildcard: Record<string, string> }[]
|
||||
export function wildcard (key: string, val: (string | Symbol)[], opts: T.WildcardQuery): { wildcard: Record<string, T.WildcardQuery> }
|
||||
export function wildcard (key: string, val: any, opts?: any): any {
|
||||
return generateValueObject('wildcard', key, val, opts)
|
||||
}
|
||||
|
||||
export function regexp (key: string, val: string | Symbol): { regexp: Record<string, string> }
|
||||
export function regexp (key: string, val: string | Symbol, opts: T.RegexpQuery): { regexp: Record<string, T.RegexpQuery> }
|
||||
export function regexp (key: string, val: (string | Symbol)[]): { regexp: Record<string, string> }[]
|
||||
export function regexp (key: string, val: (string | Symbol)[], opts: T.RegexpQuery): { regexp: Record<string, T.RegexpQuery> }
|
||||
export function regexp (key: string, val: any, opts?: any): any {
|
||||
return generateValueObject('regexp', key, val, opts)
|
||||
}
|
||||
|
||||
export function fuzzy (key: string, val: string | Symbol): { fuzzy: Record<string, string> }
|
||||
export function fuzzy (key: string, val: string | Symbol, opts: T.FuzzyQuery): { fuzzy: Record<string, T.FuzzyQuery> }
|
||||
export function fuzzy (key: string, val: (string | Symbol)[]): { fuzzy: Record<string, string> }[]
|
||||
export function fuzzy (key: string, val: (string | Symbol)[], opts: T.FuzzyQuery): { fuzzy: Record<string, T.FuzzyQuery> }
|
||||
export function fuzzy (key: string, val: any, opts?: any): any {
|
||||
return generateValueObject('fuzzy', key, val, opts)
|
||||
}
|
||||
|
||||
// Build an `ids` clause keyed by `key`, with the given document ids.
// NOTE(review): the Elasticsearch `ids` query normally takes `values` at
// the top level ({ ids: { values: [...] } }) rather than nested under a
// field name — confirm this shape against the intended API before use.
export function ids (key: string, val: (string | Symbol)[]): { ids: Record<string, T.IdsQuery> } {
  return {
    // @ts-expect-error
    ids: {
      [key]: {
        values: val
      }
    }
  }
}
|
||||
|
||||
// A single clause, bool options, or a flat array of either.
type AnyQueryWithArray = T.QueryContainer | BoolQuery | T.QueryContainer[] | BoolQuery[]
// A single clause or bool options (no arrays).
type AnyQuery = T.QueryContainer | BoolQuery
||||
export function must (...queries: AnyQueryWithArray[]): { must: T.QueryContainer[] } {
|
||||
// @ts-ignore
|
||||
return { must: queries.flatMap(mergeableMust) }
|
||||
}
|
||||
|
||||
export function should (...queries: AnyQueryWithArray[]): { should: T.QueryContainer[] } {
|
||||
// @ts-ignore
|
||||
return { should: queries.flatMap(mergeableShould) }
|
||||
}
|
||||
|
||||
export function mustNot (...queries: AnyQueryWithArray[]): { must_not: T.QueryContainer[] } {
|
||||
// @ts-ignore
|
||||
return { must_not: queries.flatMap(mergeableMustNot) }
|
||||
}
|
||||
|
||||
export function filter (...queries: AnyQueryWithArray[]): { filter: T.QueryContainer[] } {
|
||||
// @ts-ignore
|
||||
return { filter: queries.flatMap(mergeableFilter) }
|
||||
}
|
||||
|
||||
// Merges an arbitrary mix of clauses and bool options into a single
// `bool` query. Each input is normalized to a BoolQuery, clause lists
// are concatenated, `_name`/`minimum_should_match` may appear at most
// once, and the result is flattened through booptimize.
export function bool (...queries: (T.QueryContainer | T.QueryContainer[] | BoolQuery)[]): BoolBlock {
  if (queries.length === 0) {
    return { bool: {} }
  }

  // If any block carries minimum_should_match, bare clauses default to
  // `should` instead of `must`.
  // NOTE(review): this scan runs before .flat(), so a minimum_should_match
  // nested inside an array argument is not detected here — confirm intended.
  // @ts-expect-error
  const defaultClause = queries.find(q => q && !!q.minimum_should_match) ? 'should' : 'must'
  const normalizedQueries: BoolQuery[] = queries
    .flat()
    .filter(val => {
      // filters empty objects/arrays as well
      if (typeof val === 'object' && val != null) {
        return Object.keys(val).length > 0
      }
      return !!val
    })
    .map(q => toBoolQuery(q, defaultClause))

  const mustClauses: T.QueryContainer[] = []
  const mustNotClauses: T.QueryContainer[] = []
  const shouldClauses: T.QueryContainer[] = []
  const filterClauses: T.QueryContainer[] = []
  let minimum_should_match: number | string | null = null
  let _name: string | null = null

  // Concatenate every clause list; options must be unique.
  for (const query of normalizedQueries) {
    if (query.must) {
      mustClauses.push.apply(mustClauses, query.must)
    }
    if (query.must_not) {
      mustNotClauses.push.apply(mustNotClauses, query.must_not)
    }
    if (query.should) {
      shouldClauses.push.apply(shouldClauses, query.should)
    }
    if (query.filter) {
      filterClauses.push.apply(filterClauses, query.filter)
    }
    if (query._name) {
      if (_name !== null) {
        throw new Error('The query name has already been defined')
      }
      _name = query._name
    }
    if (query.minimum_should_match) {
      if (minimum_should_match !== null) {
        throw new Error('minimum_should_match has already been defined')
      }
      minimum_should_match = query.minimum_should_match
    }
  }

  // If minimum_should_match is the same of should.length,
  // then all the should clauses are required.
  if (shouldClauses.length === minimum_should_match) {
    mustClauses.push.apply(mustClauses, shouldClauses)
    shouldClauses.length = 0
    minimum_should_match = null
  }

  // Spread-with-&& drops empty clause lists entirely (false spreads to nothing).
  return {
    bool: booptimize({
      ...(mustClauses.length && Q.must(...mustClauses)),
      ...(mustNotClauses.length && Q.mustNot(...mustNotClauses)),
      ...(shouldClauses.length && Q.should(...shouldClauses)),
      ...(filterClauses.length && Q.filter(...filterClauses)),
      ...(_name && { _name }),
      ...(minimum_should_match && { minimum_should_match })
    })
  }
}
|
||||
|
||||
// AND-combines the given queries left to right into one bool block.
export function and (...queries: AnyQuery[]): BoolBlock {
  let query = queries[0]
  for (let i = 1; i < queries.length; i++) {
    query = andOp(query, queries[i])
  }
  return toBoolBlock(query)

  // Combines two queries with AND semantics.
  function andOp (q1: AnyQuery, q2: AnyQuery): BoolBlock {
    const b1 = toBoolQuery(q1)
    const b2 = toBoolQuery(q2)
    if (b1.should == null && b2.should == null) {
      // Neither side has should clauses, so their must/must_not/filter
      // lists can simply be concatenated.
      const mustClauses: T.QueryContainer[] = (b1.must || []).concat(b2.must || [])
      const mustNotClauses: T.QueryContainer[] = (b1.must_not || []).concat(b2.must_not || [])
      const filterClauses: T.QueryContainer[] = (b1.filter || []).concat(b2.filter || [])
      return {
        bool: booptimize({
          ...(mustClauses.length && Q.must(...mustClauses)),
          ...(mustNotClauses.length && Q.mustNot(...mustNotClauses)),
          ...(filterClauses.length && Q.filter(...filterClauses))
        })
      }
    } else {
      // should clauses cannot be merged safely (their semantics depend on
      // minimum_should_match), so the right side is nested inside the left
      // side's must list as a whole bool block.
      const { must, ...clauses } = b1
      return {
        bool: booptimize({
          ...(must == null ? Q.must(toBoolBlock(b2)) : Q.must(must, toBoolBlock(b2))),
          ...clauses
        })
      }
    }
  }
}
|
||||
|
||||
export function or (...queries: AnyQuery[]): BoolBlock {
|
||||
return {
|
||||
bool: booptimize(Q.should(...queries))
|
||||
}
|
||||
}
|
||||
|
||||
export function not (q: T.QueryContainer): BoolBlock
|
||||
export function not (q: BoolQuery): BoolBlock
|
||||
export function not (q: any): BoolBlock {
|
||||
const b = toBoolQuery(q)
|
||||
|
||||
if (onlyMust(b)) {
|
||||
return {
|
||||
bool: booptimize(Q.mustNot(...b.must))
|
||||
}
|
||||
} else if (onlyMustNot(b)) {
|
||||
return {
|
||||
bool: booptimize(Q.must(...b.must_not))
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
bool: booptimize(Q.mustNot(toBoolBlock(b)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function minShouldMatch (int: number): BoolQuery {
|
||||
return { minimum_should_match: int }
|
||||
}
|
||||
|
||||
export function name (queryName: string): BoolQuery {
|
||||
return { _name: queryName }
|
||||
}
|
||||
|
||||
export function nested (path: string, query: T.QueryContainer, opts: T.NestedQuery): { nested: T.NestedQuery } {
|
||||
return {
|
||||
nested: {
|
||||
path,
|
||||
...opts,
|
||||
...query
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function constantScore (query: T.QueryContainer, boost: number): { constant_score: T.ConstantScoreQuery } {
|
||||
return {
|
||||
constant_score: {
|
||||
filter: query,
|
||||
boost
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function disMax (queries: T.QueryContainer[], opts?: T.DisMaxQuery): { dis_max: T.DisMaxQuery } {
|
||||
return {
|
||||
dis_max: {
|
||||
...opts,
|
||||
queries: queries.flat()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function functionScore (function_score: T.FunctionScoreQuery): { function_score: T.FunctionScoreQuery } {
|
||||
return { function_score }
|
||||
}
|
||||
|
||||
export function boosting (boostOpts: T.BoostingQuery): { boosting: T.BoostingQuery } {
|
||||
return { boosting: boostOpts }
|
||||
}
|
||||
|
||||
export function sort (key: string | string[]): { sort: string[] }
|
||||
export function sort (key: string | string[], order: T.SortOrder): { sort: Record<string, T.SortOrder>[] }
|
||||
export function sort (key: string | string[], opts: T.Sort): { sort: Record<string, T.Sort>[] }
|
||||
export function sort (key: string | string[], opts?: any): any {
|
||||
if (opts == null) {
|
||||
return { sort: Array.isArray(key) ? key : [key] }
|
||||
}
|
||||
if (Array.isArray(key)) {
|
||||
return { sort: key.map(k => ({ [k]: opts })) }
|
||||
}
|
||||
return { sort: [{ [key]: opts }] }
|
||||
}
|
||||
|
||||
export function size (s: number | Symbol): { size: number } {
|
||||
// @ts-expect-error
|
||||
return { size: s }
|
||||
}
|
||||
|
||||
export function script (source: string): T.ScriptQuery
|
||||
export function script (source: string, lang: string): T.ScriptQuery
|
||||
export function script (source: string, params: Record<string, any>, lang?: string): T.ScriptQuery
|
||||
export function script (source: string, params?: any, lang?: any): T.ScriptQuery {
|
||||
if (typeof params === 'string') {
|
||||
return { script: { source, lang: params } }
|
||||
} else if (typeof params === 'object') {
|
||||
if (typeof lang === 'string') {
|
||||
return { script: { source, lang, params } }
|
||||
} else {
|
||||
return { script: { source, params } }
|
||||
}
|
||||
} else {
|
||||
return { script: source }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flattens ("merges up") nested bool blocks inside a bool query when it
// is safe to do so. A query carrying should/_name/minimum_should_match is
// returned untouched, because merging would change its semantics.
function booptimize (q: BoolQuery): BoolQuery {
  const clauses: t.BoolQueryOptions = {}

  if (q.minimum_should_match !== undefined ||
      q.should !== undefined || q._name !== undefined) {
    return q
  }

  if (q.must) {
    for (const c of q.must) {
      if (isBoolBlock(c)) {
        if (c.bool.should || c.bool._name) {
          // Cannot merge: keep the nested bool as an opaque must clause.
          clauses.must = clauses.must || []
          clauses.must.push(c)
        } else {
          // if we are in a BoolBlock and there is not a should clause
          // then we can "merge up" the other clauses safely
          if (c.bool.must) {
            clauses.must = clauses.must || []
            clauses.must.push.apply(clauses.must, c.bool.must)
          }

          if (c.bool.must_not) {
            clauses.must_not = clauses.must_not || []
            clauses.must_not.push.apply(clauses.must_not, c.bool.must_not)
          }

          if (c.bool.filter) {
            clauses.filter = clauses.filter || []
            clauses.filter.push.apply(clauses.filter, c.bool.filter)
          }
        }
      } else {
        clauses.must = clauses.must || []
        clauses.must.push(c)
      }
    }
  }

  if (q.filter) {
    for (const c of q.filter) {
      if (isBoolBlock(c)) {
        if (c.bool.should || c.bool.must_not || c.bool._name) {
          // Cannot merge: keep the nested bool as an opaque filter clause.
          clauses.filter = clauses.filter || []
          clauses.filter.push(c)
        } else {
          // if there are must clauses and we are inside
          // a filter clause, we can safely move them to the upper
          // filter clause, since the score is not influenced
          if (c.bool.must) {
            clauses.filter = clauses.filter || []
            clauses.filter.push.apply(clauses.filter, c.bool.must)
          }

          if (c.bool.filter) {
            clauses.filter = clauses.filter || []
            clauses.filter.push.apply(clauses.filter, c.bool.filter)
          }
        }
      } else {
        clauses.filter = clauses.filter || []
        clauses.filter.push(c)
      }
    }
  }

  if (q.must_not) {
    for (const c of q.must_not) {
      if (isBoolBlock(c)) {
        if (c.bool.should || c.bool.filter || c.bool._name) {
          // Cannot merge: keep the nested bool as an opaque must_not clause.
          clauses.must_not = clauses.must_not || []
          clauses.must_not.push(c)
        } else {
          // if 'c' is a BoolBlock and there are only must and must_not,
          // then we can swap them safely
          if (c.bool.must) {
            clauses.must_not = clauses.must_not || []
            clauses.must_not.push.apply(clauses.must_not, c.bool.must)
          }

          if (c.bool.must_not) {
            clauses.must = clauses.must || []
            clauses.must.push.apply(clauses.must, c.bool.must_not)
          }
        }
      } else {
        clauses.must_not = clauses.must_not || []
        clauses.must_not.push(c)
      }
    }
  }

  return clauses
}
|
||||
|
||||
function generateQueryObject (queryType: string, key: string, val: string | Symbol, opts?: Record<string, any>): t.Condition
|
||||
function generateQueryObject (queryType: string, key: string, val: string[], opts?: Record<string, any>): t.Condition[]
|
||||
function generateQueryObject (queryType: string, key: string, val: any, opts?: Record<string, any>): any {
|
||||
if (Array.isArray(val)) {
|
||||
return val.map(v => generateQueryObject(queryType, key, v, opts))
|
||||
}
|
||||
if (opts === undefined) {
|
||||
return { [queryType]: { [key]: val } }
|
||||
}
|
||||
return {
|
||||
[queryType]: {
|
||||
[key]: {
|
||||
query: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function generateValueObject (queryType: string, key: string, val: string | Symbol, opts?: Record<string, any>): t.Condition
|
||||
function generateValueObject (queryType: string, key: string, val: string[], opts?: Record<string, any>): t.Condition[]
|
||||
function generateValueObject (queryType: string, key: string, val: any, opts?: Record<string, any>): any {
|
||||
if (Array.isArray(val)) {
|
||||
return val.map(v => generateValueObject(queryType, key, v, opts))
|
||||
}
|
||||
if (opts === undefined) {
|
||||
return { [queryType]: { [key]: val } }
|
||||
}
|
||||
return {
|
||||
[queryType]: {
|
||||
[key]: {
|
||||
value: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isQuery (q: any): q is QueryBlock {
|
||||
return !!q.query
|
||||
}
|
||||
|
||||
function isBoolBlock (q: any): q is BoolBlock {
|
||||
return !!q.bool
|
||||
}
|
||||
|
||||
function isBoolQuery (q: any): q is BoolQuery {
|
||||
if (q.must !== undefined) return true
|
||||
if (q.should !== undefined) return true
|
||||
if (q.must_not !== undefined) return true
|
||||
if (q.filter !== undefined) return true
|
||||
if (q.minimum_should_match !== undefined) return true
|
||||
if (q._name !== undefined) return true
|
||||
return false
|
||||
}
|
||||
|
||||
function onlyShould (bool: BoolQuery): bool is t.ShouldClause {
|
||||
if (bool.must !== undefined) return false
|
||||
if (bool.must_not !== undefined) return false
|
||||
if (bool.filter !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
function onlyMust (bool: BoolQuery): bool is t.MustClause {
|
||||
if (bool.should !== undefined) return false
|
||||
if (bool.must_not !== undefined) return false
|
||||
if (bool.filter !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
function onlyMustNot (bool: BoolQuery): bool is t.MustNotClause {
|
||||
if (bool.should !== undefined) return false
|
||||
if (bool.must !== undefined) return false
|
||||
if (bool.filter !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
function onlyFilter (bool: BoolQuery): bool is t.FilterClause {
|
||||
if (bool.should !== undefined) return false
|
||||
if (bool.must !== undefined) return false
|
||||
if (bool.must_not !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
// for a given query it always return a bool block:
|
||||
// - if is a bool query returns the bool block
|
||||
// - if is a clause, wraps the query in a bool block
|
||||
// - if is condition, wraps the query into a must clause and then in a bool block
|
||||
function toBoolBlock (query: T.QueryContainer | BoolQuery): BoolBlock {
|
||||
if (isBoolQuery(query)) {
|
||||
return { bool: query }
|
||||
}
|
||||
|
||||
if (isBoolBlock(query)) {
|
||||
return query
|
||||
}
|
||||
|
||||
return { bool: { must: [query] } }
|
||||
}
|
||||
|
||||
// for a given query it always return a bool query options:
|
||||
// - if is a bool query returns the bool query options
|
||||
// - if is a clause, it returns it
|
||||
// - if is condition, wraps the query into a must clause and returns it
|
||||
type toBoolQueryDefault = 'must' | 'must_not' | 'should' | 'filter'
|
||||
function toBoolQuery (query: T.QueryContainer | BoolQuery, def: toBoolQueryDefault = 'must'): BoolQuery {
|
||||
if (isBoolQuery(query)) {
|
||||
return query
|
||||
}
|
||||
|
||||
if (isBoolBlock(query)) {
|
||||
if (query.bool._name) {
|
||||
return { [def]: [query] }
|
||||
}
|
||||
if (query.bool.minimum_should_match) {
|
||||
return { [def]: [query] }
|
||||
}
|
||||
return query.bool
|
||||
}
|
||||
|
||||
return { [def]: [query] }
|
||||
}
|
||||
|
||||
// the aim of this mergeable functions
|
||||
// is to reduce the depth of the query objects
|
||||
function mergeableMust (q: T.QueryContainer | BoolQuery): T.QueryContainer
|
||||
function mergeableMust (q: (T.QueryContainer | BoolQuery)[]): T.QueryContainer[]
|
||||
function mergeableMust (q: any): any {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableMust)
|
||||
}
|
||||
if (isBoolBlock(q)) {
|
||||
if (onlyMust(q.bool)) {
|
||||
return q.bool.must
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
} else if (isBoolQuery(q)) {
|
||||
if (onlyMust(q)) {
|
||||
return q.must
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
function mergeableShould (q: T.QueryContainer | BoolQuery): T.QueryContainer
|
||||
function mergeableShould (q: (T.QueryContainer | BoolQuery)[]): T.QueryContainer[]
|
||||
function mergeableShould (q: any): any {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableShould)
|
||||
}
|
||||
if (isBoolBlock(q)) {
|
||||
if (onlyShould(q.bool)) {
|
||||
return q.bool.should
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
} else if (isBoolQuery(q)) {
|
||||
if (onlyShould(q)) {
|
||||
return q.should
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
function mergeableMustNot (q: T.QueryContainer | BoolQuery): T.QueryContainer
|
||||
function mergeableMustNot (q: (T.QueryContainer | BoolQuery)[]): T.QueryContainer[]
|
||||
function mergeableMustNot (q: any): any {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableMustNot)
|
||||
}
|
||||
|
||||
if (isBoolBlock(q)) {
|
||||
if (onlyMustNot(q.bool)) {
|
||||
return q.bool.must_not
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
} else if (isBoolQuery(q)) {
|
||||
if (onlyMustNot(q)) {
|
||||
return q.must_not
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
function mergeableFilter (q: T.QueryContainer | BoolQuery): T.QueryContainer
|
||||
function mergeableFilter (q: (T.QueryContainer | BoolQuery)[]): T.QueryContainer[]
|
||||
function mergeableFilter (q: any): any {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableFilter)
|
||||
}
|
||||
if (isBoolBlock(q)) {
|
||||
if (onlyFilter(q.bool)) {
|
||||
return q.bool.filter
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
} else if (isBoolQuery(q)) {
|
||||
if (onlyFilter(q)) {
|
||||
return q.filter
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
// code from https://github.com/fwilkerson/clean-set
|
||||
function setParam (source: Record<string, any>, keys: string[], update: any) {
|
||||
const next = copy(source)
|
||||
let last = next
|
||||
|
||||
for (let i = 0, len = keys.length; i < len; i++) {
|
||||
// @ts-ignore
|
||||
last = last[keys[i]] = i === len - 1 ? update : copy(last[keys[i]])
|
||||
}
|
||||
|
||||
return next
|
||||
|
||||
function copy (source: Record<string, any> | any[]): Record<string, any> | any[] {
|
||||
const to = source && !!source.pop ? [] : {}
|
||||
for (const i in source) {
|
||||
// @ts-ignore
|
||||
to[i] = source[i]
|
||||
}
|
||||
return to
|
||||
}
|
||||
}
|
||||
|
||||
// code from https://github.com/lukeed/dset
|
||||
function setParam2 (obj: Record<string, any>, keys: string[], val: any) {
|
||||
let x
|
||||
for (let i = 0, len = keys.length; i < len; i++) {
|
||||
x = obj[keys[i]]
|
||||
if (i === len - 1) {
|
||||
obj = obj[keys[i]] = val
|
||||
} else if (x != null) {
|
||||
obj = obj[keys[i]] = x
|
||||
} else if (!!~keys[i + 1].indexOf('.') || !(+keys[i + 1] > -1)) {
|
||||
obj = obj[keys[i]] = {}
|
||||
} else {
|
||||
obj = obj[keys[i]] = []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default Q
|
||||
71
dsl/src/types.ts
Normal file
71
dsl/src/types.ts
Normal file
@ -0,0 +1,71 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
/* eslint no-use-before-define: 0 */

// A single leaf query condition, e.g. `{ match: { foo: 'bar' } }`.
export type Condition = Record<string, any>

// Top-level search body wrapper: `{ query: { ... } }`.
export interface QueryBlock {
  query: Record<string, any>
}

// A bool clause containing only `must` conditions.
export interface MustClause {
  must: Condition[]
}

// A bool clause containing only `must_not` conditions.
export interface MustNotClause {
  must_not: Condition[]
}

// A bool clause containing only `should` conditions, optionally with
// its minimum_should_match option.
export interface ShouldClause {
  should: Condition[]
  minimum_should_match?: number
}

// A bool clause containing only `filter` conditions.
export interface FilterClause {
  filter: Condition[]
}

// A full bool query wrapped in its `query` envelope:
// `{ query: { bool: { ... } } }`.
export interface BoolQuery<TOptions = BoolQueryOptions> {
  query: {
    bool: TOptions
  }
}

// A bool query without the `query` envelope: `{ bool: { ... } }`.
export interface BoolBlock {
  bool: BoolQueryOptions
}

// The clauses and options accepted inside a `bool` query body.
export interface BoolQueryOptions {
  must?: Condition[] | BoolBlock[]
  must_not?: Condition[] | BoolBlock[]
  should?: Condition[] | BoolBlock[]
  filter?: Condition[] | BoolBlock[]
  minimum_should_match?: number
  _name?: string
}

// Any shape accepted where a query is expected.
export type AnyQuery = QueryBlock | BoolQueryOptions | Condition | Condition[]

// Any shape accepted where a bool query is expected.
export type AnyBoolQuery = BoolQuery | BoolQueryOptions | Condition | Condition[]

// An aggregation definition; the exact shape depends on the agg type.
export type Aggregation = Record<string, any>

// A compiled query template: maps runtime input to a request body.
export type compiledFunction<TInput> = (input: TInput) => Record<string, any>;
|
||||
20
dsl/tsconfig.json
Normal file
20
dsl/tsconfig.json
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"moduleResolution": "node",
|
||||
"declaration": true,
|
||||
"target": "es2019",
|
||||
"module": "commonjs",
|
||||
"outDir": "lib",
|
||||
"pretty": true,
|
||||
"noEmitOnError": true,
|
||||
"experimentalDecorators": false,
|
||||
"sourceMap": true,
|
||||
"emitDecoratorMetadata": false,
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"removeComments": true,
|
||||
"noUnusedLocals": true
|
||||
},
|
||||
"exclude": ["examples"],
|
||||
"include": ["./src/*.ts"]
|
||||
}
|
||||
11
package.json
11
package.json
@ -33,10 +33,14 @@
|
||||
"test:coverage-100": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --100 --nyc-arg=\"--exclude=api\"",
|
||||
"test:coverage-report": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --nyc-arg=\"--exclude=api\" && nyc report --reporter=text-lcov > coverage.lcov",
|
||||
"test:coverage-ui": "tap test/{unit,acceptance}/{*,**/*}.test.js --coverage --coverage-report=html --nyc-arg=\"--exclude=api\"",
|
||||
"test:dsl": "npm run lint:dsl && npm run build:ts && tap --ts test/dsl/*.test.ts",
|
||||
"lint": "standard",
|
||||
"lint:dsl": "standardx --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin dsl/src/*.ts dsl/examples/*.ts test/dsl/*.ts",
|
||||
"lint:fix": "standard --fix",
|
||||
"license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause'",
|
||||
"build-esm": "npx gen-esm-wrapper . index.mjs && standard --fix index.mjs"
|
||||
"build:esm": "npx gen-esm-wrapper . index.mjs && standard --fix index.mjs",
|
||||
"build:ts": "rimraf dsl/lib/* && tsc --project dsl/tsconfig.json",
|
||||
"prepublish": "npm run build:ts"
|
||||
},
|
||||
"author": {
|
||||
"name": "Tomas Della Vedova",
|
||||
@ -49,9 +53,10 @@
|
||||
"devDependencies": {
|
||||
"@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1",
|
||||
"@types/node": "^12.6.2",
|
||||
"@typescript-eslint/eslint-plugin": "^4.0.1",
|
||||
"@typescript-eslint/parser": "^4.0.1",
|
||||
"convert-hrtime": "^3.0.0",
|
||||
"dedent": "^0.7.0",
|
||||
"deepmerge": "^4.0.0",
|
||||
"dezalgo": "^1.0.3",
|
||||
"fast-deep-equal": "^3.1.1",
|
||||
"into-stream": "^5.1.1",
|
||||
@ -67,9 +72,11 @@
|
||||
"simple-statistics": "^7.0.2",
|
||||
"split2": "^3.1.1",
|
||||
"standard": "^13.0.2",
|
||||
"standardx": "^5.0.0",
|
||||
"stoppable": "^1.1.0",
|
||||
"tap": "^14.4.1",
|
||||
"tsd": "^0.13.1",
|
||||
"typescript": "^4.0.2",
|
||||
"workq": "^2.1.0",
|
||||
"xmlbuilder2": "^2.1.2"
|
||||
},
|
||||
|
||||
1678
test/dsl/aggregation.test.ts
Normal file
1678
test/dsl/aggregation.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
782
test/dsl/bool-optimize.test.ts
Normal file
782
test/dsl/bool-optimize.test.ts
Normal file
@ -0,0 +1,782 @@
|
||||
'use strict'
|
||||
|
||||
import { test } from 'tap'
|
||||
import { Q } from '../../dsl'
|
||||
|
||||
test('must only query', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.must(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } },
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must and must_not query', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.mustNot(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
),
|
||||
Q.bool(
|
||||
Q.mustNot(
|
||||
Q.match('13', '14'),
|
||||
Q.term('15', '16')
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } },
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
],
|
||||
must_not: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{ match: { 13: '14' } },
|
||||
{ term: { 15: '16' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must and must_not query (mixed and nested)', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.mustNot(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
),
|
||||
Q.bool(
|
||||
Q.mustNot(
|
||||
Q.match('13', '14')
|
||||
),
|
||||
Q.must(
|
||||
Q.term('15', '16')
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } },
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } },
|
||||
{ term: { 15: '16' } }
|
||||
],
|
||||
must_not: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{ match: { 13: '14' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must and should query', t => {
|
||||
const query = Q.bool(
|
||||
Q.must(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.should(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
should: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must and should query (nested) / 1', t => {
|
||||
const query = Q.bool(
|
||||
Q.must(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.must(
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.should(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } },
|
||||
{
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } }
|
||||
],
|
||||
should: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must and should query (nested) / 2', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.must(
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.should(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } },
|
||||
{
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } }
|
||||
],
|
||||
should: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must and filter query / 1', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.filter(
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.filter(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
filter: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must and filter query / 2', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.filter(
|
||||
Q.bool(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8'),
|
||||
Q.filter(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
filter: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('all but should query / 1', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.mustNot(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.bool(
|
||||
Q.filter(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
must_not: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } }
|
||||
],
|
||||
filter: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('all but should query / 2', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.bool(
|
||||
Q.mustNot(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.filter(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
must_not: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } }
|
||||
],
|
||||
filter: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('all but should query / 3', t => {
|
||||
const query = Q.bool(
|
||||
Q.mustNot(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.must(
|
||||
Q.bool(
|
||||
Q.mustNot(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.filter(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must_not: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
filter: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('all but should query / 4', t => {
|
||||
const query = Q.bool(
|
||||
Q.must(
|
||||
Q.match('17', '18'),
|
||||
Q.term('19', '20')
|
||||
),
|
||||
Q.mustNot(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.must(
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('13', '14'),
|
||||
Q.term('15', '16')
|
||||
),
|
||||
Q.mustNot(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.filter(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 17: '18' } },
|
||||
{ term: { 19: '20' } },
|
||||
{ match: { 13: '14' } },
|
||||
{ term: { 15: '16' } }
|
||||
],
|
||||
must_not: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
filter: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('filter with should', t => {
|
||||
const query = Q.bool(
|
||||
Q.filter(
|
||||
Q.bool(
|
||||
Q.must(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.should(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
filter: [
|
||||
{
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
should: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('nested with only should', t => {
|
||||
const query = Q.bool(
|
||||
Q.should(
|
||||
Q.bool(
|
||||
Q.should(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('nested with only should and minimum_should_match / 1', t => {
|
||||
const query = Q.bool(
|
||||
Q.should(
|
||||
Q.bool(
|
||||
Q.should(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.minShouldMatch(1)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
should: [{
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
minimum_should_match: 1
|
||||
}
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('nested with only should and minimum_should_match / 2', t => {
|
||||
const query = Q.bool(
|
||||
Q.bool(
|
||||
Q.should(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.minShouldMatch(1)
|
||||
),
|
||||
Q.should(Q.match('5', '6'))
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [{
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
minimum_should_match: 1
|
||||
}
|
||||
}],
|
||||
should: [{
|
||||
match: { 5: '6' }
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('nested with should and other clause', t => {
|
||||
const query = Q.bool(
|
||||
Q.should(
|
||||
Q.bool(
|
||||
Q.should(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.mustNot(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
should: [{
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
must_not: [
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } }
|
||||
]
|
||||
}
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('nested with only should', t => {
|
||||
const query = Q.bool(
|
||||
Q.should(
|
||||
Q.match('1', '2')
|
||||
),
|
||||
Q.should(
|
||||
Q.term('3', '4')
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('nested with only should and minimum_should_match', t => {
|
||||
const query = Q.bool(
|
||||
Q.should(
|
||||
Q.match('1', '2')
|
||||
),
|
||||
Q.should(
|
||||
Q.term('3', '4')
|
||||
),
|
||||
Q.minShouldMatch(1)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } }
|
||||
],
|
||||
minimum_should_match: 1
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Should not merge up named queries / 1', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.must(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8')
|
||||
),
|
||||
Q.bool(
|
||||
Q.name('test'),
|
||||
Q.must(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } },
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{
|
||||
bool: {
|
||||
_name: 'test',
|
||||
must: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Should not merge up named queries / 2', t => {
|
||||
const query = Q.bool(
|
||||
Q.match('1', '2'),
|
||||
Q.term('3', '4'),
|
||||
Q.must(
|
||||
Q.match('5', '6'),
|
||||
Q.term('7', '8'),
|
||||
Q.bool(
|
||||
Q.name('test'),
|
||||
Q.must(
|
||||
Q.match('9', '10'),
|
||||
Q.term('11', '12')
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(query, {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { 1: '2' } },
|
||||
{ term: { 3: '4' } },
|
||||
{ match: { 5: '6' } },
|
||||
{ term: { 7: '8' } },
|
||||
{
|
||||
bool: {
|
||||
_name: 'test',
|
||||
must: [
|
||||
{ match: { 9: '10' } },
|
||||
{ term: { 11: '12' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('_name defined twice', t => {
|
||||
try {
|
||||
Q.bool(
|
||||
Q.name('foo'),
|
||||
Q.name('bar')
|
||||
)
|
||||
t.fail('should throw')
|
||||
} catch (err) {
|
||||
t.is(err.message, 'The query name has already been defined')
|
||||
}
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('minimum_should_match defined twice', t => {
|
||||
try {
|
||||
Q.bool(
|
||||
Q.minShouldMatch(4),
|
||||
Q.minShouldMatch(2)
|
||||
)
|
||||
t.fail('should throw')
|
||||
} catch (err) {
|
||||
t.is(err.message, 'minimum_should_match has already been defined')
|
||||
}
|
||||
|
||||
t.end()
|
||||
})
|
||||
358
test/dsl/boolean-and-helpers.test.ts
Normal file
358
test/dsl/boolean-and-helpers.test.ts
Normal file
@ -0,0 +1,358 @@
|
||||
'use strict'
|
||||
|
||||
import { test } from 'tap'
|
||||
import { Q } from '../../dsl'
|
||||
|
||||
test('AND', t => {
|
||||
t.test('Bool and Bool', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
noShouldClausesWithName(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar')), Q.name('name')),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.must(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.bool(Q.should(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz'))),
|
||||
Q.bool(Q.filter(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz'))),
|
||||
Q.bool(Q.should(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool and Clause', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.filter(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.must(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.bool(Q.should(Q.match('foo', 'bar'))),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.filter(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.should(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Clause and Bool', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.must(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.should(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.bool(Q.filter(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.bool(Q.should(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Clause and Clause', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.must(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.should(Q.match('foo', 'bar')),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.filter(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.should(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool and Condition', t => {
|
||||
const query1 = Q.bool(Q.must(Q.match('foo', 'bar')))
|
||||
const query2 = Q.term('baz', 'faz')
|
||||
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool (with should) and Condition', t => {
|
||||
const query1 = Q.bool(Q.should(Q.match('foo', 'bar')))
|
||||
const query2 = Q.term('baz', 'faz')
|
||||
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [{ term: { baz: 'faz' } }],
|
||||
should: [{ match: { foo: 'bar' } }]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Condition and Condition', t => {
|
||||
const query1 = Q.match('foo', 'bar')
|
||||
const query2 = Q.term('baz', 'faz')
|
||||
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.end()
|
||||
|
||||
function noShouldClauses (t, query1, query2) {
|
||||
t.test('No should clauses', t => {
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [{ match: { foo: 'bar' } }],
|
||||
filter: [{ term: { baz: 'faz' } }]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function noShouldClausesWithName (t, query1, query2) {
|
||||
t.test('No should clauses with name', t => {
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [{
|
||||
bool: {
|
||||
must: [{ match: { foo: 'bar' } }],
|
||||
_name: 'name'
|
||||
}
|
||||
}],
|
||||
filter: [{ term: { baz: 'faz' } }]
|
||||
}
|
||||
})
|
||||
|
||||
t.deepEqual(Q.and(query1, query2), Q.and(query2, query1))
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function shouldClauses (t, query1, query2) {
|
||||
t.test('Should clauses', t => {
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{
|
||||
bool: {
|
||||
should: [
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function sameClauseNoShould (t, query1, query2) {
|
||||
t.test('same clauses without should', t => {
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function sameClauseYesShould (t, query1, query2) {
|
||||
t.test('same clauses with should', t => {
|
||||
t.deepEqual(Q.and(query1, query2), {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
bool: {
|
||||
should: [
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
should: [
|
||||
{ match: { foo: 'bar' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function moreNoShould (t, query1, query2, query3) {
|
||||
t.test('More than two clauses without should', t => {
|
||||
t.deepEqual(Q.and(query1, query2, query3), {
|
||||
bool: {
|
||||
must: [{ match: { foo: 'bar' } }],
|
||||
filter: [
|
||||
{ term: { baz: 'faz' } },
|
||||
{ term: { winter: 'is coming' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function moreYesShould (t, query1, query2, query3) {
|
||||
t.test('More than two clauses with should', t => {
|
||||
t.deepEqual(Q.and(query1, query2, query3), {
|
||||
bool: {
|
||||
must: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{
|
||||
bool: {
|
||||
should: [
|
||||
{ term: { winter: 'is coming' } }
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
filter: [{ term: { baz: 'faz' } }]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
})
|
||||
153
test/dsl/boolean-not-helpers.test.ts
Normal file
153
test/dsl/boolean-not-helpers.test.ts
Normal file
@ -0,0 +1,153 @@
|
||||
'use strict'
|
||||
|
||||
import { test } from 'tap'
|
||||
import { Q } from '../../dsl'
|
||||
|
||||
test('NOT', t => {
|
||||
t.test('Bool query', t => {
|
||||
const query = Q.bool(
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must_not: [{
|
||||
bool: {
|
||||
must: [{ match: { foo: 'bar' } }],
|
||||
filter: [{ term: { baz: 'faz' } }]
|
||||
}
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool query (with must_not)', t => {
|
||||
const query = Q.bool(
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.mustNot(Q.term('baz', 'faz'))
|
||||
)
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must_not: [{ match: { foo: 'bar' } }],
|
||||
must: [{ term: { baz: 'faz' } }]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool query (only must)', t => {
|
||||
const query = Q.bool(
|
||||
Q.must(Q.match('foo', 'bar'))
|
||||
)
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must_not: [{
|
||||
match: { foo: 'bar' }
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool query (only must_not)', t => {
|
||||
const query = Q.bool(
|
||||
Q.mustNot(Q.match('foo', 'bar'))
|
||||
)
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must: [{
|
||||
match: { foo: 'bar' }
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Must clause', t => {
|
||||
const query = Q.must(Q.match('foo', 'bar'))
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must_not: [{
|
||||
match: { foo: 'bar' }
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Should clause', t => {
|
||||
const query = Q.should(Q.match('foo', 'bar'))
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must_not: [{
|
||||
bool: {
|
||||
should: [{
|
||||
match: { foo: 'bar' }
|
||||
}]
|
||||
}
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Filter clause', t => {
|
||||
const query = Q.filter(Q.match('foo', 'bar'))
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must_not: [{
|
||||
bool: {
|
||||
filter: [{
|
||||
match: { foo: 'bar' }
|
||||
}]
|
||||
}
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Must clause', t => {
|
||||
const query = Q.mustNot(Q.match('foo', 'bar'))
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must: [{
|
||||
match: { foo: 'bar' }
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Condition', t => {
|
||||
const query = Q.match('foo', 'bar')
|
||||
|
||||
t.deepEqual(Q.not(query), {
|
||||
bool: {
|
||||
must_not: [{
|
||||
match: { foo: 'bar' }
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
316
test/dsl/boolean-or-helpers.test.ts
Normal file
316
test/dsl/boolean-or-helpers.test.ts
Normal file
@ -0,0 +1,316 @@
|
||||
'use strict'
|
||||
|
||||
import { test } from 'tap'
|
||||
import { Q } from '../../dsl'
|
||||
|
||||
test('OR', t => {
|
||||
t.test('Bool and Bool', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.must(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.bool(Q.should(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz'))),
|
||||
Q.bool(Q.filter(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz'))),
|
||||
Q.bool(Q.should(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool and Clause', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.filter(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.must(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.bool(Q.should(Q.match('foo', 'bar'))),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.filter(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.bool(Q.must(Q.match('foo', 'bar'))),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.should(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Clause and Bool', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.filter(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.must(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.should(Q.match('foo', 'bar')),
|
||||
Q.bool(Q.should(Q.term('baz', 'faz')))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.bool(Q.filter(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.bool(Q.should(Q.term('winter', 'is coming')))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Clause and Clause', t => {
|
||||
noShouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
shouldClauses(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.must(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
sameClauseYesShould(
|
||||
t,
|
||||
Q.should(Q.match('foo', 'bar')),
|
||||
Q.should(Q.term('baz', 'faz'))
|
||||
)
|
||||
|
||||
moreNoShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.filter(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
moreYesShould(
|
||||
t,
|
||||
Q.must(Q.match('foo', 'bar')),
|
||||
Q.filter(Q.term('baz', 'faz')),
|
||||
Q.should(Q.term('winter', 'is coming'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool and Condition', t => {
|
||||
const query1 = Q.bool(Q.must(Q.match('foo', 'bar')))
|
||||
const query2 = Q.term('baz', 'faz')
|
||||
|
||||
t.deepEqual(Q.or(query1, query2), {
|
||||
bool: {
|
||||
should: [
|
||||
{ bool: { must: [{ match: { foo: 'bar' } }] } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Bool (with should) and Condition', t => {
|
||||
const query1 = Q.bool(Q.should(Q.match('foo', 'bar')))
|
||||
const query2 = Q.term('baz', 'faz')
|
||||
|
||||
t.deepEqual(Q.or(query1, query2), {
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Condition and Condition', t => {
|
||||
const query1 = Q.match('foo', 'bar')
|
||||
const query2 = Q.term('baz', 'faz')
|
||||
|
||||
t.deepEqual(Q.or(query1, query2), {
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.end()
|
||||
|
||||
function noShouldClauses (t, query1, query2) {
|
||||
t.test('No should clauses', t => {
|
||||
t.deepEqual(Q.or(query1, query2), {
|
||||
bool: {
|
||||
should: [
|
||||
{ bool: { must: [{ match: { foo: 'bar' } }] } },
|
||||
{ bool: { filter: [{ term: { baz: 'faz' } }] } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function shouldClauses (t, query1, query2) {
|
||||
t.test('Should clauses', t => {
|
||||
t.deepEqual(Q.or(query1, query2), {
|
||||
bool: {
|
||||
should: [
|
||||
{ bool: { must: [{ match: { foo: 'bar' } }] } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function sameClauseNoShould (t, query1, query2) {
|
||||
t.test('same clauses without should', t => {
|
||||
t.deepEqual(Q.or(query1, query2), {
|
||||
bool: {
|
||||
should: [
|
||||
{ bool: { must: [{ match: { foo: 'bar' } }] } },
|
||||
{ bool: { must: [{ term: { baz: 'faz' } }] } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function sameClauseYesShould (t, query1, query2) {
|
||||
t.test('same clauses with should', t => {
|
||||
t.deepEqual(Q.or(query1, query2), {
|
||||
bool: {
|
||||
should: [
|
||||
{ match: { foo: 'bar' } },
|
||||
{ term: { baz: 'faz' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function moreNoShould (t, query1, query2, query3) {
|
||||
t.test('More than two clauses without should', t => {
|
||||
t.deepEqual(Q.or(query1, query2, query3), {
|
||||
bool: {
|
||||
should: [
|
||||
{ bool: { must: [{ match: { foo: 'bar' } }] } },
|
||||
{ bool: { filter: [{ term: { baz: 'faz' } }] } },
|
||||
{ bool: { filter: [{ term: { winter: 'is coming' } }] } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function moreYesShould (t, query1, query2, query3) {
|
||||
t.test('More than two clauses with should', t => {
|
||||
t.deepEqual(Q.or(query1, query2, query3), {
|
||||
bool: {
|
||||
should: [
|
||||
{ bool: { must: [{ match: { foo: 'bar' } }] } },
|
||||
{ bool: { filter: [{ term: { baz: 'faz' } }] } },
|
||||
{ term: { winter: 'is coming' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
})
|
||||
556
test/dsl/fluent-query.test.ts
Normal file
556
test/dsl/fluent-query.test.ts
Normal file
@ -0,0 +1,556 @@
|
||||
'use strict'
|
||||
|
||||
import { test } from 'tap'
|
||||
/* eslint-disable no-unused-vars */
|
||||
import * as types from '../../dsl/lib/types'
|
||||
/* eslint-enable no-unused-vars */
|
||||
import { F, Q } from '../../dsl'
|
||||
|
||||
test('match', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.match('foo', 'bar')
|
||||
.match('foo', 'baz')
|
||||
.build(),
|
||||
Q(
|
||||
Q.match('foo', 'bar'),
|
||||
Q.match('foo', 'baz')
|
||||
)
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.match('foo', ['bar', 'baz'])
|
||||
.build(),
|
||||
Q(
|
||||
Q.match('foo', ['bar', 'baz'])
|
||||
)
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('matchPhrase', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.matchPhrase('foo', 'bar')
|
||||
.build(),
|
||||
Q(Q.matchPhrase('foo', 'bar'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('matchPhrasePrefix', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.matchPhrasePrefix('foo', 'bar')
|
||||
.build(),
|
||||
Q(Q.matchPhrasePrefix('foo', 'bar'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('multiMatch', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.multiMatch(['foo1', 'foo2'], 'bar')
|
||||
.build(),
|
||||
Q(Q.multiMatch(['foo1', 'foo2'], 'bar'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('matchAll', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.matchAll()
|
||||
.build(),
|
||||
Q(Q.matchAll())
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.matchAll({ boost: 1 })
|
||||
.build(),
|
||||
Q(Q.matchAll({ boost: 1 }))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('matchNone', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.matchNone()
|
||||
.build(),
|
||||
Q(Q.matchNone())
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('common', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.common('key', 'val')
|
||||
.build(),
|
||||
Q(Q.common('key', 'val'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.common('key', 'val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.common('key', 'val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.common('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.common('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('queryString', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.queryString('val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.queryString('val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('simpleQueryString', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.simpleQueryString('val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.simpleQueryString('val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('term', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.term('key', 'val')
|
||||
.build(),
|
||||
Q(Q.term('key', 'val'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.term('key', 'val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.term('key', 'val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.term('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.term('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('terms', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.terms('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.terms('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.terms('key', ['val1', 'val2'], { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.terms('key', ['val1', 'val2'], { boost: 1 }))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('termsSet', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.termsSet('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.termsSet('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.termsSet('key', ['val1', 'val2'], { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.termsSet('key', ['val1', 'val2'], { boost: 1 }))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('range', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.range('key', { gt: 10 })
|
||||
.build(),
|
||||
Q(Q.range('key', { gt: 10 }))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('exists', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.exists('key')
|
||||
.build(),
|
||||
Q(Q.exists('key'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.exists(['key1', 'key2'])
|
||||
.build(),
|
||||
Q(Q.exists(['key1', 'key2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('prefix', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.prefix('key', 'val')
|
||||
.build(),
|
||||
Q(Q.prefix('key', 'val'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.prefix('key', 'val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.prefix('key', 'val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.prefix('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.prefix('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('wildcard', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.wildcard('key', 'val')
|
||||
.build(),
|
||||
Q(Q.wildcard('key', 'val'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.wildcard('key', 'val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.wildcard('key', 'val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.wildcard('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.wildcard('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('regexp', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.regexp('key', 'val')
|
||||
.build(),
|
||||
Q(Q.regexp('key', 'val'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.regexp('key', 'val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.regexp('key', 'val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.regexp('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.regexp('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('fuzzy', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.fuzzy('key', 'val')
|
||||
.build(),
|
||||
Q(Q.fuzzy('key', 'val'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.fuzzy('key', 'val', { boost: 1 })
|
||||
.build(),
|
||||
Q(Q.fuzzy('key', 'val', { boost: 1 }))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F()
|
||||
.fuzzy('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.fuzzy('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ids', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.ids('key', ['val1', 'val2'])
|
||||
.build(),
|
||||
Q(Q.ids('key', ['val1', 'val2']))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('minShouldMatch', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.minShouldMatch(42)
|
||||
.build(),
|
||||
Q(Q.minShouldMatch(42))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('name', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.name('test')
|
||||
.build(),
|
||||
Q(Q.name('test'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('size', t => {
|
||||
t.deepEqual(
|
||||
F()
|
||||
.size(42)
|
||||
.build(),
|
||||
Q(Q.size(42))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('script', t => {
|
||||
t.deepEqual(
|
||||
F().script("doc['num1'].value > 1").build(),
|
||||
Q(Q.script("doc['num1'].value > 1"))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F().script("doc['num1'].value > 1", 'painless').build(),
|
||||
Q(Q.script("doc['num1'].value > 1", 'painless'))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F().script("doc['num1'].value > 1", { foo: 'bar' }).build(),
|
||||
Q(Q.script("doc['num1'].value > 1", { foo: 'bar' }))
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
F().script("doc['num1'].value > 1", { foo: 'bar' }, 'painless').build(),
|
||||
Q(Q.script("doc['num1'].value > 1", { foo: 'bar' }, 'painless'))
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('must', t => {
|
||||
const q1 = F().must(
|
||||
F().match('foo', 'bar'),
|
||||
F().term('hello', 'world'),
|
||||
F().should(
|
||||
F().match('one', 'two').match('three', 'four')
|
||||
),
|
||||
F().match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
const q2 = Q.must(
|
||||
Q.match('foo', 'bar'),
|
||||
Q.term('hello', 'world'),
|
||||
Q.should(
|
||||
Q.match('one', 'two'),
|
||||
Q.match('three', 'four')
|
||||
),
|
||||
Q.match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
t.deepEqual(q1.buildQuery(), Q.bool(q2))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('should', t => {
|
||||
const q1 = F().should(
|
||||
F().match('foo', 'bar'),
|
||||
F().term('hello', 'world'),
|
||||
F().must(
|
||||
F().match('one', 'two').match('three', 'four')
|
||||
),
|
||||
F().match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
const q2 = Q.should(
|
||||
Q.match('foo', 'bar'),
|
||||
Q.term('hello', 'world'),
|
||||
Q.must(
|
||||
Q.match('one', 'two'),
|
||||
Q.match('three', 'four')
|
||||
),
|
||||
Q.match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
t.deepEqual(q1.buildQuery(), Q.bool(q2))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('mustNot', t => {
|
||||
const q1 = F().mustNot(
|
||||
F().match('foo', 'bar'),
|
||||
F().term('hello', 'world'),
|
||||
F().should(
|
||||
F().match('one', 'two').match('three', 'four')
|
||||
),
|
||||
F().match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
const q2 = Q.mustNot(
|
||||
Q.match('foo', 'bar'),
|
||||
Q.term('hello', 'world'),
|
||||
Q.should(
|
||||
Q.match('one', 'two'),
|
||||
Q.match('three', 'four')
|
||||
),
|
||||
Q.match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
t.deepEqual(q1.buildQuery(), Q.bool(q2))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('filter', t => {
|
||||
const q1 = F().filter(
|
||||
F().match('foo', 'bar'),
|
||||
F().term('hello', 'world'),
|
||||
F().should(
|
||||
F().match('one', 'two').match('three', 'four')
|
||||
),
|
||||
F().match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
const q2 = Q.filter(
|
||||
Q.match('foo', 'bar'),
|
||||
Q.term('hello', 'world'),
|
||||
Q.should(
|
||||
Q.match('one', 'two'),
|
||||
Q.match('three', 'four')
|
||||
),
|
||||
Q.match('faz', ['baz', 'zaz'])
|
||||
)
|
||||
|
||||
t.deepEqual(q1.buildQuery(), Q.bool(q2))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('and', t => {
|
||||
const q1 = F()
|
||||
.match('foo', 'bar')
|
||||
.match('foo', 'baz')
|
||||
|
||||
const q2 = F()
|
||||
.should(
|
||||
F()
|
||||
.term('foo', 'bar')
|
||||
.term('foo', 'baz')
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
q1.clone().and(q2).buildQuery(),
|
||||
Q.and(q1.buildQuery(), q2.buildQuery())
|
||||
)
|
||||
|
||||
const q3 = F()
|
||||
.match('foo', 'bar')
|
||||
.match('foo', 'baz')
|
||||
.sort('foo')
|
||||
|
||||
const q4 = F()
|
||||
.should(
|
||||
F()
|
||||
.term('foo', 'bar')
|
||||
.term('foo', 'baz')
|
||||
)
|
||||
|
||||
t.deepEqual(
|
||||
q3.clone().and(q4).buildQuery(),
|
||||
Q.and(q3.buildQuery(), q4.buildQuery())
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('toJSON', t => {
|
||||
const q1 = F()
|
||||
.match('foo', 'bar')
|
||||
|
||||
t.strictEqual(
|
||||
JSON.stringify(q1),
|
||||
'{"match":{"foo":"bar"}}'
|
||||
)
|
||||
|
||||
const q2 = F()
|
||||
.match('foo', 'bar')
|
||||
.match('foo', 'baz')
|
||||
|
||||
t.strictEqual(
|
||||
JSON.stringify(q2),
|
||||
'{"bool":{"must":[{"match":{"foo":"bar"}},{"match":{"foo":"baz"}}]}}'
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
1050
test/dsl/query.test.ts
Normal file
1050
test/dsl/query.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
@ -861,6 +861,41 @@ test('Elastic cloud config', t => {
|
||||
t.deepEqual(pool._ssl, { secureProtocol: 'TLSv1_2_method' })
|
||||
})
|
||||
|
||||
t.test('Without kibana component', t => {
|
||||
t.plan(5)
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
// 'localhost$abcd$efgh'
|
||||
id: 'name:bG9jYWxob3N0JGFiY2Qk',
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
|
||||
const pool = client.connectionPool
|
||||
t.ok(pool instanceof CloudConnectionPool)
|
||||
t.match(pool.connections.find(c => c.id === 'https://abcd.localhost/'), {
|
||||
url: new URL('https://elastic:changeme@abcd.localhost'),
|
||||
id: 'https://abcd.localhost/',
|
||||
headers: {
|
||||
authorization: 'Basic ' + Buffer.from('elastic:changeme').toString('base64')
|
||||
},
|
||||
ssl: { secureProtocol: 'TLSv1_2_method' },
|
||||
deadCount: 0,
|
||||
resurrectTimeout: 0,
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
})
|
||||
|
||||
t.strictEqual(client.transport.compression, 'gzip')
|
||||
t.strictEqual(client.transport.suggestCompression, true)
|
||||
t.deepEqual(pool._ssl, { secureProtocol: 'TLSv1_2_method' })
|
||||
})
|
||||
|
||||
t.test('Auth as separate option', t => {
|
||||
t.plan(5)
|
||||
const client = new Client({
|
||||
|
||||
Reference in New Issue
Block a user