DSL initial commit
This commit is contained in:
17
dsl/examples/README.md
Normal file
17
dsl/examples/README.md
Normal file
@ -0,0 +1,17 @@
|
||||
# Examples
|
||||
|
||||
In this folder you will find different examples to show the usage of the DSL.
|
||||
|
||||
## Instructions
|
||||
Before running any of the examples in this folder, you should run `npm install` to install all the required dependencies, and then run the `loadRepo` script.
|
||||
|
||||
## Run an example
|
||||
Running an example is very easy, you just need to run the following command:
|
||||
```sh
|
||||
npm run example examples/<filename>
|
||||
```
|
||||
|
||||
For example:
|
||||
```sh
|
||||
npm run example examples/last-commits.ts
|
||||
```
|
||||
39
dsl/examples/boolean-logic.ts
Normal file
39
dsl/examples/boolean-logic.ts
Normal file
@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// define the query clauses
|
||||
const fixDescription = Q.must(Q.match('description', 'fix'))
|
||||
const files = Q.should(Q.term('files', 'test'), Q.term('files', 'docs'))
|
||||
const author = Q.filter(Q.term('author.name', Q.param('author')))
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
// use the boolean utilities to craft the final query
|
||||
body: Q.and(fixDescription, files, author)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
73
dsl/examples/compile-query.ts
Normal file
73
dsl/examples/compile-query.ts
Normal file
@ -0,0 +1,73 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q } from '../'
|
||||
|
||||
// You can compile a query if you need to get
|
||||
// the best performances out of your code.
|
||||
// The query crafting and compilation should be done
|
||||
// outside of your hot code path.
|
||||
// First of all you should create your query almost
|
||||
// in the same way as you were doing before, the only
|
||||
// difference is that all the parameters you are passing
|
||||
// now should be updated with the `Q.param` API.
|
||||
// The only parameter of `Q.param` is the name of the parameter
|
||||
// that you were passing before.
|
||||
const query = Q(
|
||||
Q.match('description', Q.param('description')),
|
||||
Q.filter(
|
||||
Q.term('author.name', Q.param('author'))
|
||||
),
|
||||
Q.size(10)
|
||||
)
|
||||
|
||||
// Afterwards, you can create an interface that represents
|
||||
// the input object of the compiled query. The input object
|
||||
// contains all the parameters you were passing before, the
|
||||
// keys are the same you have passed to the various `Q.param`
|
||||
// invocations before. It defaults to `unknown`.
|
||||
interface Input {
|
||||
description: string
|
||||
author: string
|
||||
}
|
||||
// Once you have created the query and the input interface,
|
||||
// you must pass the query to `Q.compile` and store the result
|
||||
// in a variable. `Q.compile` returns a function that accepts
|
||||
// a single object parameter, which is the same you have declared
|
||||
// in the interface before.
|
||||
const compiledQuery = Q.compile<Input>(query)
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
// Finally, you call the function inside your hot code path,
|
||||
// the returned value will be the query.
|
||||
body: compiledQuery({
|
||||
description: 'fix',
|
||||
author: 'delvedor'
|
||||
})
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
44
dsl/examples/day-most-commits.ts
Normal file
44
dsl/examples/day-most-commits.ts
Normal file
@ -0,0 +1,44 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// get the day where the most commits were made
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.size(0),
|
||||
// 'day_most_commits' is the name of the aggregation
|
||||
A(A.day_most_commits.dateHistogram({
|
||||
field: 'committed_date',
|
||||
interval: 'day',
|
||||
min_doc_count: 1,
|
||||
order: { _count: 'desc' }
|
||||
}))
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
47
dsl/examples/extend-aggregations.ts
Normal file
47
dsl/examples/extend-aggregations.ts
Normal file
@ -0,0 +1,47 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// 'committers' is the name of the aggregation
|
||||
let committersAgg = A.committers.terms('committer.name.keyword')
|
||||
// instead of pass other aggregations as parameter
|
||||
// to the parent aggregation, you can conditionally add them
|
||||
if (Math.random() >= 0.5) {
|
||||
committersAgg = A.committers.aggs(
|
||||
committersAgg, A.line_stats.stats('stat.insertions')
|
||||
)
|
||||
}
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.size(0),
|
||||
A(committersAgg)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
66
dsl/examples/extend-queries.ts
Normal file
66
dsl/examples/extend-queries.ts
Normal file
@ -0,0 +1,66 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// the result must be fixes done by delvedor
|
||||
let query = Q.bool(
|
||||
Q.must(Q.match('description', 'fix')),
|
||||
Q.filter(Q.term('author.name', 'delvedor'))
|
||||
)
|
||||
|
||||
// Based on a condition, we want to enrich our query
|
||||
if (Math.random() >= 0.5) {
|
||||
// the results must be fixes done by delvedor
|
||||
// on test or do files
|
||||
const should = Q.should(
|
||||
Q.term('files', 'test'),
|
||||
Q.term('files', 'docs')
|
||||
)
|
||||
// The code below produces the same as the one above
|
||||
// If you need to check multiple values for the same key,
|
||||
// you can pass an array of strings instead of calling
|
||||
// the query function multiple times
|
||||
// ```
|
||||
// const should = Q.should(
|
||||
// Q.term('files', ['test', 'docs'])
|
||||
// )
|
||||
// ```
|
||||
query = Q.and(query, should)
|
||||
} else {
|
||||
// the results must be fixes or features done by delvedor
|
||||
const must = Q.must(
|
||||
Q.match('description', 'feature')
|
||||
)
|
||||
query = Q.or(query, must)
|
||||
}
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: query
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
40
dsl/examples/fix-commit.ts
Normal file
40
dsl/examples/fix-commit.ts
Normal file
@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// search commits that contains 'fix' but do not changes test files
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q.bool(
|
||||
// You can avoid to call `Q.must`, as any query will be
|
||||
// sent inside a `must` block unless specified otherwise
|
||||
Q.must(Q.match('description', 'fix')),
|
||||
Q.mustNot(Q.term('files', 'test'))
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
39
dsl/examples/last-commits.ts
Normal file
39
dsl/examples/last-commits.ts
Normal file
@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// last 10 commits for 'elasticsearch-js' repo
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.term('repository', 'elasticsearch-js'),
|
||||
Q.sort('committed_date', { order: 'desc' }),
|
||||
Q.size(10)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
159
dsl/examples/loadRepo.js
Normal file
159
dsl/examples/loadRepo.js
Normal file
@ -0,0 +1,159 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const minimist = require('minimist')
|
||||
const Git = require('simple-git/promise')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
// Parse the CLI flags (with sensible defaults) and kick off the loader.
const argv = minimist(process.argv.slice(2), {
  string: ['elasticsearch', 'index', 'repository'],
  default: {
    elasticsearch: 'http://localhost:9200',
    index: 'git',
    repository: 'elasticsearch-js'
  }
})

start(argv)

// Create the index with its mappings first, then load the git history
// of the current working directory into it.
async function start ({ elasticsearch, index, repository }) {
  const client = new Client({ node: elasticsearch })
  await createIndex({ client, index })
  await loadHistory({ client, index, repository })
}
|
||||
|
||||
// Creates the target index with settings and mappings suited for
// indexing a git history.
async function createIndex ({ client, index }) {
  // Mapping shared by the 'author' and 'committer' fields:
  // full-text searchable, with a keyword sub-field for aggregations.
  const userMapping = {
    properties: {
      name: {
        type: 'text',
        fields: {
          keyword: { type: 'keyword' }
        }
      }
    }
  }

  const settings = {
    // just one shard, no replicas for testing
    number_of_shards: 1,
    number_of_replicas: 0,
    // custom analyzer for analyzing file paths
    analysis: {
      analyzer: {
        file_path: {
          type: 'custom',
          tokenizer: 'path_hierarchy',
          filter: ['lowercase']
        }
      }
    }
  }

  const mappings = {
    properties: {
      repository: { type: 'keyword' },
      sha: { type: 'keyword' },
      author: userMapping,
      authored_date: { type: 'date' },
      committer: userMapping,
      committed_date: { type: 'date' },
      parent_shas: { type: 'keyword' },
      description: { type: 'text', analyzer: 'snowball' },
      files: { type: 'text', analyzer: 'file_path', fielddata: true }
    }
  }

  await client.indices.create({
    index,
    body: { settings, mappings }
  })
}
|
||||
|
||||
// Reads the git history of the current working directory, enriches
// each commit with its per-file stats, and bulk-indexes everything
// into Elasticsearch, 500 commits per request.
async function loadHistory ({ client, index, repository }) {
  const git = Git(process.cwd())

  // Get the result of 'git log' with a custom field format.
  const { all: history } = await git.log({
    format: {
      hash: '%H',
      parentHashes: '%P',
      authorName: '%an',
      authorEmail: '%ae',
      authorDate: '%ai',
      committerName: '%cn',
      committerEmail: '%ce',
      committerDate: '%cd',
      subject: '%s'
    }
  })

  // Collect per-commit stats via 'git show --numstat'.
  for (var i = 0; i < history.length; i++) {
    const commit = history[i]
    const stat = await git.show(['--numstat', '--oneline', commit.hash])
    commit.files = []
    commit.stat = stat
      .split('\n')
      .slice(1) // drop the '--oneline' header line
      .filter(Boolean)
      .reduce((acc, val) => {
        const [insertions, deletions, file] = val.split('\t')
        commit.files.push(file)
        acc.files++
        // git prints '-' for binary files; Number('-') is NaN and
        // would poison the whole sum, so count those entries as 0.
        acc.insertions += Number(insertions) || 0
        acc.deletions += Number(deletions) || 0
        return acc
      }, { insertions: 0, deletions: 0, files: 0 })
  }

  // Index the data, 500 commits at a time
  var count = 0
  var chunk = history.slice(count, count + 500)
  while (chunk.length > 0) {
    const { body } = await client.bulk({
      body: chunk.reduce((body, commit) => {
        body.push({ index: { _index: index, _id: commit.hash } })
        body.push({
          repository,
          sha: commit.hash,
          author: {
            name: commit.authorName,
            email: commit.authorEmail
          },
          authored_date: new Date(commit.authorDate).toISOString(),
          committer: {
            name: commit.committerName,
            email: commit.committerEmail
          },
          committed_date: new Date(commit.committerDate).toISOString(),
          parent_shas: commit.parentHashes,
          description: commit.subject,
          files: commit.files,
          stat: commit.stat
        })
        return body
      }, [])
    })
    // Abort on the first bulk error, printing the first failed item.
    if (body.errors) {
      console.log(JSON.stringify(body.items[0], null, 2))
      process.exit(1)
    }
    count += 500
    chunk = history.slice(count, count + 500)
  }
}
|
||||
47
dsl/examples/top-committers.ts
Normal file
47
dsl/examples/top-committers.ts
Normal file
@ -0,0 +1,47 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
// top committers aggregation
|
||||
// 'committers' is the name of the aggregation
|
||||
const committersAgg = A.committers.terms(
|
||||
{ field: 'committer.name.keyword' },
|
||||
// you can nest multiple aggregations by
|
||||
// passing them to the aggregation constructor
|
||||
// 'line_stats' is the name of the aggregation
|
||||
A.line_stats.stats({ field: 'stat.insertions' })
|
||||
)
|
||||
const { body } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
Q.matchAll(),
|
||||
Q.size(0),
|
||||
A(committersAgg)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(body.aggregations)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
61
dsl/examples/top-month.ts
Normal file
61
dsl/examples/top-month.ts
Normal file
@ -0,0 +1,61 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Client } from '../../'
|
||||
import { Q, A } from '../'
|
||||
|
||||
async function run () {
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
const committers = A.committers.terms(
|
||||
{ field: 'committer.name.keyword' },
|
||||
A.insertions.sum({ field: 'stat.insertions' })
|
||||
)
|
||||
const topCommittersPerMonth = A.top_committer_per_month.maxBucket(
|
||||
{ bucket_path: 'committers>insertions' }
|
||||
)
|
||||
const commitsPerMonth = A.commits_per_month.dateHistogram(
|
||||
{
|
||||
field: 'committed_date',
|
||||
interval: 'day',
|
||||
min_doc_count: 1,
|
||||
order: { _count: 'desc' }
|
||||
},
|
||||
// nested aggregations
|
||||
committers,
|
||||
topCommittersPerMonth
|
||||
)
|
||||
const topCommittersPerMonthGlobal = A.top_committer_per_month.maxBucket(
|
||||
{ bucket_path: 'commits_per_month>top_committer_per_month' }
|
||||
)
|
||||
|
||||
const { body: topMonths } = await client.search({
|
||||
index: 'git',
|
||||
body: Q(
|
||||
// we want to know the top month for 'delvedor'
|
||||
Q.filter(Q.term('author', 'delvedor')),
|
||||
Q.size(0),
|
||||
A(commitsPerMonth, topCommittersPerMonthGlobal)
|
||||
)
|
||||
})
|
||||
|
||||
console.log(topMonths)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
23
dsl/index.d.ts
vendored
Normal file
23
dsl/index.d.ts
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import Q from './lib/query-helpers'
|
||||
import A from './lib/aggregation-helpers'
|
||||
|
||||
export { Q, A }
|
||||
25
dsl/index.js
Normal file
25
dsl/index.js
Normal file
@ -0,0 +1,25 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const Q = require('./lib/query-helpers').default
|
||||
const A = require('./lib/aggregation-helpers').default
|
||||
|
||||
module.exports = { Q, A }
|
||||
385
dsl/src/aggregation-helpers.ts
Normal file
385
dsl/src/aggregation-helpers.ts
Normal file
@ -0,0 +1,385 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-undef: 0 */
|
||||
/* eslint no-use-before-define: 0 */
|
||||
/* eslint no-redeclare: 0 */
|
||||
|
||||
import * as t from './types'
|
||||
|
||||
// Loose object shape used for free-form aggregation options.
interface anyObject {
  [key: string]: any
}

// Options accepted by the aggregation helpers: either a full options
// object, or a plain string shorthand (treated as the target field
// name by the helpers that call isString(opts) ? 'field' : null).
type aggsOptions = anyObject | string
|
||||
|
||||
function _A (...aggregations: any[]): any {
|
||||
return {
|
||||
aggs: Object.assign.apply(null, aggregations.filter(falsy))
|
||||
}
|
||||
}
|
||||
|
||||
// Interface of the `A` export: calling A(...) wraps aggregations into
// an { aggs: ... } container, while any property access (A.<name>)
// yields a builder object whose methods create an aggregation of the
// corresponding Elasticsearch type, named <name>.
interface Aggregations {
  (...aggregations: any[]): any
  [name: string]: {
    // add aggregations to a parent aggregation
    aggs(...aggregations: any[]): t.Aggregation
    // Metric aggregations (opts: options object, or string field name
    // for the helpers that support the shorthand)
    avg(opts: aggsOptions): t.Aggregation
    weightedAvg(opts: aggsOptions): t.Aggregation
    cardinality(opts: aggsOptions): t.Aggregation
    extendedStats(opts: aggsOptions): t.Aggregation
    geoBounds(opts: aggsOptions): t.Aggregation
    geoCentroid(opts: aggsOptions): t.Aggregation
    max(opts: aggsOptions): t.Aggregation
    min(opts: aggsOptions): t.Aggregation
    percentiles(opts: aggsOptions): t.Aggregation
    percentileRanks(opts: aggsOptions): t.Aggregation
    scriptedMetric(opts: aggsOptions): t.Aggregation
    stats(opts: aggsOptions): t.Aggregation
    sum(opts: aggsOptions): t.Aggregation
    topHits(opts: aggsOptions): t.Aggregation
    valueCount(opts: aggsOptions): t.Aggregation
    medianAbsoluteDeviation(opts: aggsOptions): t.Aggregation
    // Buckets aggregations (additionally accept nested child aggregations)
    adjacencyMatrix(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    autoDateHistogram(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    children(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    composite(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    dateHistogram(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    dateRange(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    diversifiedSampler(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    filter(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    filters(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    geoDistance(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    geohashGrid(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    geotileGrid(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    global(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    histogram(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    ipRange(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    missing(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    nested(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    parent(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    range(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    reverseNested(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    sampler(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    significantTerms(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    significantText(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    terms(opts: aggsOptions, ...aggregations: any[]): t.Aggregation
    // Pipeline aggregations
    avgBucket (opts: aggsOptions): t.Aggregation
    derivative (opts: aggsOptions): t.Aggregation
    maxBucket (opts: aggsOptions): t.Aggregation
    minBucket (opts: aggsOptions): t.Aggregation
    sumBucket (opts: aggsOptions): t.Aggregation
    statsBucket (opts: aggsOptions): t.Aggregation
    extendedStatsBucket (opts: aggsOptions): t.Aggregation
    percentilesBucket (opts: aggsOptions): t.Aggregation
    movingAvg (opts: aggsOptions): t.Aggregation
    movingFn (opts: aggsOptions): t.Aggregation
    cumulativeSum (opts: aggsOptions): t.Aggregation
    bucketScript (opts: aggsOptions): t.Aggregation
    bucketSelector (opts: aggsOptions): t.Aggregation
    bucketSort (opts: aggsOptions): t.Aggregation
    serialDiff (opts: aggsOptions): t.Aggregation
    // Matrix aggregations
    matrixStats (opts: aggsOptions): t.Aggregation
  }
}
|
||||
|
||||
const aggregations = {
|
||||
get: function (target: unknown, name: string) {
|
||||
return {
|
||||
// add aggregations to a parent aggregation
|
||||
aggs (...aggregations: any[]): t.Aggregation {
|
||||
return updateAggsObject(name, aggregations)
|
||||
},
|
||||
|
||||
// Metric aggregations
|
||||
avg (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('avg', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
weightedAvg (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('weighted_avg', name, null, opts)
|
||||
},
|
||||
|
||||
cardinality (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('cardinality', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
extendedStats (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('extended_stats', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
geoBounds (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('geo_bounds', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
geoCentroid (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('geo_centroid', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
max (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('max', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
min (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('min', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
percentiles (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('percentiles', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
percentileRanks (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('percentile_ranks', name, null, opts)
|
||||
},
|
||||
|
||||
scriptedMetric (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('scripted_metric', name, null, opts)
|
||||
},
|
||||
|
||||
stats (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('stats', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
sum (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('sum', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
topHits (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('top_hits', name, null, opts)
|
||||
},
|
||||
|
||||
valueCount (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('value_count', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
medianAbsoluteDeviation (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('median_absolute_deviation', name, isString(opts) ? 'field' : null, opts)
|
||||
},
|
||||
|
||||
// Buckets aggregations
|
||||
adjacencyMatrix (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('adjacency_matrix', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
autoDateHistogram (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('auto_date_histogram', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
children (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('children', name, isString(opts) ? 'type' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
composite (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('composite', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
dateHistogram (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('date_histogram', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
dateRange (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('date_range', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
diversifiedSampler (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('diversified_sampler', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
filter (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('filter', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
filters (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('filters', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
geoDistance (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('geo_distance', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
geohashGrid (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('geohash_grid', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
geotileGrid (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('geotile_grid', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
global (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('global', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
histogram (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('histogram', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
ipRange (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('ip_range', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
missing (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('missing', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
nested (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('nested', name, isString(opts) ? 'path' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
parent (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('parent', name, isString(opts) ? 'type' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
range (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('range', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
reverseNested (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('reverse_nested', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
sampler (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('sampler', name, null, opts, aggregations)
|
||||
},
|
||||
|
||||
significantTerms (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('significant_terms', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
significantText (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('significant_text', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
terms (opts: aggsOptions, ...aggregations: any[]): t.Aggregation {
|
||||
return generateAggsObject('terms', name, isString(opts) ? 'field' : null, opts, aggregations)
|
||||
},
|
||||
|
||||
// Pipeline aggregations
|
||||
avgBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('avg_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
derivative (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('derivative', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
maxBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('max_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
minBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('min_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
sumBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('sum_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
statsBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('stats_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
extendedStatsBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('extended_stats_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
percentilesBucket (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('percentiles_bucket', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
movingAvg (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('moving_avg', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
movingFn (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('moving_fn', name, null, opts)
|
||||
},
|
||||
|
||||
cumulativeSum (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('cumulative_sum', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
bucketScript (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('bucket_script', name, null, opts)
|
||||
},
|
||||
|
||||
bucketSelector (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('bucket_selector', name, null, opts)
|
||||
},
|
||||
|
||||
bucketSort (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('bucket_sort', name, null, opts)
|
||||
},
|
||||
|
||||
serialDiff (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('serial_diff', name, isString(opts) ? 'buckets_path' : null, opts)
|
||||
},
|
||||
|
||||
// Matrix aggregations
|
||||
matrixStats (opts: aggsOptions): t.Aggregation {
|
||||
return generateAggsObject('matrix_stats', name, isString(opts) ? 'fields' : null, opts)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Proxy so that any property access (A.<aggregation name>) is routed through
// the `aggregations` handler above, which returns the per-name builder object.
// NOTE(review): `_A` and the `Aggregations` interface are declared elsewhere
// in this file.
const A = new Proxy(_A, aggregations) as Aggregations
|
||||
|
||||
function generateAggsObject (type: string, name: string, defaultField: string | null, opts: any = {}, aggregations: any[] = []): t.Aggregation {
|
||||
if (typeof opts === 'string' && typeof defaultField === 'string') {
|
||||
opts = { [defaultField]: opts }
|
||||
} else if (typeof opts === 'string' && defaultField === null) {
|
||||
throw new Error('This method does not support shorthand options')
|
||||
}
|
||||
|
||||
if (aggregations.length > 0) {
|
||||
return {
|
||||
[name]: {
|
||||
[type]: opts,
|
||||
aggs: Object.assign.apply(null, aggregations.filter(falsy))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
[name]: {
|
||||
[type]: opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function updateAggsObject (name: string, aggregations: any[]): t.Aggregation {
|
||||
const [main, ...others] = aggregations.filter(falsy)
|
||||
main[name].aggs = Object.assign(main[name].aggs || {}, ...others)
|
||||
return main
|
||||
}
|
||||
|
||||
function falsy (val: any): boolean {
|
||||
return !!val
|
||||
}
|
||||
|
||||
function isString (val: any): val is string {
|
||||
return typeof val === 'string'
|
||||
}
|
||||
|
||||
export default A
|
||||
726
dsl/src/query-helpers.ts
Normal file
726
dsl/src/query-helpers.ts
Normal file
@ -0,0 +1,726 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-undef: 0 */
|
||||
/* eslint no-use-before-define: 0 */
|
||||
/* eslint no-redeclare: 0 */
|
||||
|
||||
import deepMerge from 'deepmerge'
|
||||
import * as t from './types'
|
||||
|
||||
function Q (...blocks: t.AnyQuery[]): Record<string, any> {
|
||||
const { aggs,
|
||||
collapse,
|
||||
explain,
|
||||
from,
|
||||
highlight,
|
||||
indices_boost,
|
||||
min_score,
|
||||
post_filter,
|
||||
profile,
|
||||
rescore,
|
||||
script_fields,
|
||||
search_after,
|
||||
size,
|
||||
slice,
|
||||
sort,
|
||||
_source,
|
||||
suggest,
|
||||
terminate_after,
|
||||
timeout,
|
||||
track_scores,
|
||||
version,
|
||||
...queries
|
||||
} = Object.assign.apply({}, blocks.flat())
|
||||
|
||||
const query: t.AnyQuery[] = Object.keys(queries).map(q => ({ [q]: queries[q] }))
|
||||
const body: Record<string, any> = query.length > 0 ? Q.bool(...query) : {}
|
||||
if (aggs) body.aggs = aggs
|
||||
if (collapse) body.collapse = collapse
|
||||
if (explain) body.explain = explain
|
||||
if (from) body.from = from
|
||||
if (highlight) body.highlight = highlight
|
||||
if (indices_boost) body.indices_boost = indices_boost
|
||||
if (min_score) body.min_score = min_score
|
||||
if (post_filter) body.post_filter = post_filter
|
||||
if (profile) body.profile = profile
|
||||
if (rescore) body.rescore = rescore
|
||||
if (script_fields) body.script_fields = script_fields
|
||||
if (search_after) body.search_after = search_after
|
||||
if (size) body.size = size
|
||||
if (slice) body.slice = slice
|
||||
if (sort) body.sort = sort
|
||||
if (_source) body._source = _source
|
||||
if (suggest) body.suggest = suggest
|
||||
if (terminate_after) body.terminate_after = terminate_after
|
||||
if (timeout) body.timeout = timeout
|
||||
if (track_scores) body.track_scores = track_scores
|
||||
if (version) body.version = version
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Function#name is read-only by default; make it writable so the `Q.name`
// helper can be attached further down in this file.
Object.defineProperty(Q, 'name', { writable: true })
|
||||
|
||||
// Returns the placeholder string for a named parameter. Q.compile later
// replaces every "###key###" marker with a runtime lookup of `input[key]`.
Q.param = function param (key: string) {
  return `###${key}###`
}
|
||||
|
||||
Q.compile = function compile<TInput = unknown> (query: Record<string, any>): t.compiledFunction<TInput> {
|
||||
let stringified = JSON.stringify(query)
|
||||
const keys: string[] = []
|
||||
const matches = stringified.match(/"###\w+###"/g)
|
||||
if (matches === null) {
|
||||
throw new Error('The query does not contain any use of `Q.params`')
|
||||
}
|
||||
for (const match of matches) {
|
||||
const key = match.slice(4, -4)
|
||||
keys.push(key)
|
||||
stringified = stringified.replace(new RegExp(match), `input[${JSON.stringify(key)}]`)
|
||||
}
|
||||
const code = `
|
||||
if (input == null) {
|
||||
throw new Error('Input must not be empty')
|
||||
}
|
||||
const keys = ${JSON.stringify(keys)}
|
||||
for (const key of keys) {
|
||||
if (input[key] === undefined) {
|
||||
throw new Error('Missing key: ' + key)
|
||||
}
|
||||
}
|
||||
return ${stringified}
|
||||
`
|
||||
// @ts-ignore
|
||||
return new Function('input', code) // eslint-disable-line
|
||||
}
|
||||
|
||||
// Builds a `match` condition for `key`. Overloads: a string value yields a
// single condition, an array of values yields one condition per value.
function match (key: string, val: string, opts?: Record<string, any>): t.Condition
function match (key: string, val: string[], opts?: Record<string, any>): t.Condition[]
function match (key: string, val: any, opts?: Record<string, any>): t.Condition | t.Condition[] {
  return generateQueryObject('match', key, val, opts)
}
Q.match = match

// Builds a `match_phrase` condition for `key`.
Q.matchPhrase = function matchPhrase (key: string, val: string, opts?: Record<string, any>): t.Condition {
  return generateQueryObject('match_phrase', key, val, opts)
}

// Builds a `match_phrase_prefix` condition for `key`.
Q.matchPhrasePrefix = function matchPhrasePrefix (key: string, val: string, opts?: Record<string, any>): t.Condition {
  return generateQueryObject('match_phrase_prefix', key, val, opts)
}
|
||||
|
||||
Q.multiMatch = function multiMatch (keys: string[], val: string, opts?: Record<string, any>): t.Condition {
|
||||
return {
|
||||
multi_match: {
|
||||
query: val,
|
||||
fields: keys,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Q.matchAll = function matchAll (opts?: Record<string, any>): t.Condition {
|
||||
return { match_all: { ...opts } }
|
||||
}
|
||||
|
||||
Q.matchNone = function matchNone (): t.Condition {
|
||||
return { match_none: {} }
|
||||
}
|
||||
|
||||
Q.common = function common (key: string, val: string, opts: Record<string, any>): t.Condition {
|
||||
return generateQueryObject('common', key, val, opts)
|
||||
}
|
||||
|
||||
Q.queryString = function queryString (val: string, opts: Record<string, any>): t.Condition {
|
||||
return {
|
||||
query_string: {
|
||||
query: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Q.simpleQueryString = function simpleQueryString (val: string, opts: Record<string, any>): t.Condition {
|
||||
return {
|
||||
simple_query_string: {
|
||||
query: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Q.term = function term (key: string, val: string | string[], opts?: Record<string, any>): t.Condition {
|
||||
if (Array.isArray(val)) {
|
||||
return Q.terms(key, val, opts)
|
||||
}
|
||||
return generateValueObject('term', key, val, opts)
|
||||
}
|
||||
|
||||
Q.terms = function terms (key: string, val: string[], opts?: Record<string, any>): t.Condition {
|
||||
return {
|
||||
terms: {
|
||||
[key]: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Q.termsSet = function termsSet (key: string, val: string[], opts: Record<string, any>): t.Condition {
|
||||
return {
|
||||
terms_set: {
|
||||
[key]: {
|
||||
terms: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Q.range = function range (key: string, val: any): t.Condition {
|
||||
return { range: { [key]: val } }
|
||||
}
|
||||
|
||||
function exists (key: string): t.Condition
|
||||
function exists (key: string[]): t.Condition[]
|
||||
function exists (key: string | string[]): t.Condition | t.Condition[] {
|
||||
if (Array.isArray(key)) {
|
||||
return key.map(k => exists(k))
|
||||
}
|
||||
return { exists: { field: key } }
|
||||
}
|
||||
Q.exists = exists
|
||||
|
||||
// Builds a `prefix` condition for `key`; an array of values yields one
// condition per value.
function prefix (key: string, val: string, opts?: Record<string, any>): t.Condition
function prefix (key: string, val: string[], opts?: Record<string, any>): t.Condition[]
function prefix (key: string, val: any, opts?: Record<string, any>): t.Condition | t.Condition[] {
  return generateValueObject('prefix', key, val, opts)
}
Q.prefix = prefix

// Builds a `wildcard` condition for `key`; an array of values yields one
// condition per value.
function wildcard (key: string, val: string, opts?: Record<string, any>): t.Condition
function wildcard (key: string, val: string[], opts?: Record<string, any>): t.Condition[]
function wildcard (key: string, val: any, opts?: Record<string, any>): t.Condition | t.Condition[] {
  return generateValueObject('wildcard', key, val, opts)
}
Q.wildcard = wildcard

// Builds a `regexp` condition for `key`; an array of values yields one
// condition per value.
function regexp (key: string, val: string, opts?: Record<string, any>): t.Condition
function regexp (key: string, val: string[], opts?: Record<string, any>): t.Condition[]
function regexp (key: string, val: any, opts?: Record<string, any>): t.Condition | t.Condition[] {
  return generateValueObject('regexp', key, val, opts)
}
Q.regexp = regexp

// Builds a `fuzzy` condition for `key`; an array of values yields one
// condition per value.
function fuzzy (key: string, val: string, opts?: Record<string, any>): t.Condition
function fuzzy (key: string, val: string[], opts?: Record<string, any>): t.Condition[]
function fuzzy (key: string, val: any, opts?: Record<string, any>): t.Condition | t.Condition[] {
  return generateValueObject('fuzzy', key, val, opts)
}
Q.fuzzy = fuzzy
|
||||
|
||||
// Builds an `ids` query.
// NOTE(review): this nests `values` under `[key]`, producing
// { ids: { <key>: { values: [...] } } }. Elasticsearch's ids query is
// documented as { ids: { values: [...] } } with no field key — confirm this
// shape is intentional before relying on it.
Q.ids = function ids (key: string, val: string[], opts: Record<string, any>): t.Condition {
  return {
    ids: {
      [key]: {
        values: val,
        ...opts
      }
    }
  }
}
|
||||
|
||||
Q.must = function must (...queries: t.AnyQuery[]): t.MustClause {
|
||||
return { must: queries.flatMap(mergeableMust) }
|
||||
}
|
||||
|
||||
Q.should = function should (...queries: t.AnyQuery[]): t.ShouldClause {
|
||||
return { should: queries.flatMap(mergeableShould) }
|
||||
}
|
||||
|
||||
Q.mustNot = function mustNot (...queries: t.AnyQuery[]): t.MustNotClause {
|
||||
return { must_not: queries.flatMap(mergeableMustNot) }
|
||||
}
|
||||
|
||||
Q.filter = function filter (...queries: t.AnyQuery[]): t.FilterClause {
|
||||
return { filter: queries.flatMap(mergeableFilter) }
|
||||
}
|
||||
|
||||
// Builds a complete bool query from any mix of inputs:
//  - bare conditions are wrapped in a `must` clause,
//  - clause objects (must/should/must_not/filter/minimum_should_match/_name)
//    are merged as-is,
//  - nested bool queries are unwrapped, except named ones which must be
//    kept intact as their own block.
// Throws when a `should` clause is present alongside more than one clause
// of another type, because deep-merging would change the query semantics.
Q.bool = function bool (...queries: t.AnyQuery[]): t.BoolQuery {
  if (queries.length === 0) {
    return { query: { bool: {} } }
  }

  const normalizedQueries: t.BoolQueryOptions[] = queries
    .flat()
    .filter(val => {
      // filters empty objects/arrays as well
      if (typeof val === 'object' && val != null) {
        return Object.keys(val).length > 0
      }
      return !!val
    })
    .map(q => {
      if (isBool(q)) {
        // a named bool query must survive as its own block,
        // so wrap it in a must clause instead of unwrapping it
        if (q.query.bool._name) {
          return { must: [q.query] }
        }
        return q.query.bool
      }

      if (isClause(q)) {
        return q
      }

      // a bare condition becomes a must clause
      return { must: [q] }
    })

  // count how many separate clauses of each type are about to be merged
  const clauseCount = {
    must: 0,
    should: 0,
    must_not: 0,
    filter: 0
  }
  for (let i = 0; i < normalizedQueries.length; i++) {
    const q = normalizedQueries[i]
    if (q.must !== undefined) { clauseCount.must++ }
    if (q.should !== undefined) { clauseCount.should++ }
    if (q.must_not !== undefined) { clauseCount.must_not++ }
    if (q.filter !== undefined) { clauseCount.filter++ }
  }

  // if there is at least one should, we cannot deep merge
  // multiple clauses, so we check how many clauses we have per type
  // and we throw an error if there is more than one per type
  if (clauseCount.should > 0) {
    if (clauseCount.must > 1 || clauseCount.must_not > 1 || clauseCount.filter > 1) {
      throw new Error('Cannot merge this query')
    }
  }

  const bool: t.BoolQueryOptions = deepMerge.all(normalizedQueries)

  // if there are not should clauses,
  // we can safely deepmerge queries
  return {
    query: {
      bool: optimize(bool)
    }
  }
}
|
||||
|
||||
// Tries to flatten the bool query: nested bool blocks inside must/filter/
// must_not are hoisted into the parent clause whenever doing so cannot
// change scoring or matching semantics. Queries that carry a should clause,
// a minimum_should_match or a _name are returned untouched, since
// restructuring those would alter their meaning.
function optimize (q: t.BoolQueryOptions): t.BoolQueryOptions {
  const clauses: t.BoolQueryOptions = {}

  if (q.minimum_should_match !== undefined ||
      q.should !== undefined || q._name !== undefined) {
    return q
  }

  if (q.must) {
    for (const c of q.must) {
      if (isBoolBlock(c)) {
        // nested should/_name blocks must be preserved as-is
        if (c.bool.should || c.bool._name) {
          clauses.must = clauses.must || []
          clauses.must.push(c)
        } else {
          // if we are in a BoolBlock and there is not a should clause
          // then we can "merge up" the other clauses safely
          if (c.bool.must) {
            clauses.must = clauses.must || []
            clauses.must.push.apply(clauses.must, c.bool.must)
          }

          if (c.bool.must_not) {
            clauses.must_not = clauses.must_not || []
            clauses.must_not.push.apply(clauses.must_not, c.bool.must_not)
          }

          if (c.bool.filter) {
            clauses.filter = clauses.filter || []
            clauses.filter.push.apply(clauses.filter, c.bool.filter)
          }
        }
      } else {
        clauses.must = clauses.must || []
        clauses.must.push(c)
      }
    }
  }

  if (q.filter) {
    for (const c of q.filter) {
      if (isBoolBlock(c)) {
        // should, must_not and _name blocks cannot be hoisted out of a filter
        if (c.bool.should || c.bool.must_not || c.bool._name) {
          clauses.filter = clauses.filter || []
          clauses.filter.push(c)
        } else {
          // if there are must clauses and we are inside
          // a filter clause, we can safely move them to the upper
          // filter clause, since the score is not influenced
          if (c.bool.must) {
            clauses.filter = clauses.filter || []
            clauses.filter.push.apply(clauses.filter, c.bool.must)
          }

          if (c.bool.filter) {
            clauses.filter = clauses.filter || []
            clauses.filter.push.apply(clauses.filter, c.bool.filter)
          }
        }
      } else {
        clauses.filter = clauses.filter || []
        clauses.filter.push(c)
      }
    }
  }

  if (q.must_not) {
    for (const c of q.must_not) {
      if (isBoolBlock(c)) {
        // should, filter and _name blocks cannot be restructured under negation
        if (c.bool.should || c.bool.filter || c.bool._name) {
          clauses.must_not = clauses.must_not || []
          clauses.must_not.push(c)
        } else {
          // if 'c' is a BoolBlock and there are only must and must_not,
          // then we can swap them safely
          if (c.bool.must) {
            clauses.must_not = clauses.must_not || []
            clauses.must_not.push.apply(clauses.must_not, c.bool.must)
          }

          // double negation: a must_not inside a must_not becomes a must
          if (c.bool.must_not) {
            clauses.must = clauses.must || []
            clauses.must.push.apply(clauses.must, c.bool.must_not)
          }
        }
      } else {
        clauses.must_not = clauses.must_not || []
        clauses.must_not.push(c)
      }
    }
  }

  return clauses
}
|
||||
|
||||
// Combines the given queries with AND semantics, folding pairwise from
// left to right.
Q.and = function and (...queries: t.AnyQuery[]): t.BoolQuery {
  let query = queries[0]
  for (let i = 1; i < queries.length; i++) {
    query = andOp(query, queries[i])
  }
  return query as t.BoolQuery

  // ANDs two queries: when neither side is a pure `should`, a deep merge of
  // their bool clauses is equivalent to AND; otherwise q2 is pushed into
  // q1's must clause so q1's should semantics are preserved.
  function andOp (q1: t.AnyQuery, q2: t.AnyQuery): t.BoolQuery {
    const b1: t.BoolQuery = toMustQuery(q1)
    const b2: t.BoolQuery = toMustQuery(q2)
    if (!onlyShould(b1.query.bool) && !onlyShould(b2.query.bool)) {
      return deepMerge(b1, b2)
    } else {
      const { must, ...clauses } = b1.query.bool
      return Q.bool(
        must == null ? Q.must(b2) : Q.must(must, b2),
        clauses
      )
    }
  }
}
|
||||
|
||||
// Combines the given queries with OR semantics (a single should clause).
Q.or = function or (...queries: t.AnyQuery[]): t.BoolQuery {
  return Q.bool(Q.should(...queries))
}
|
||||
|
||||
// Negates a query. Bare conditions are wrapped in must_not; a must-only
// bool is turned into must_not; a must_not-only bool is turned back into
// must (double negation); anything mixed is negated as a whole block.
Q.not = function not (q: t.AnyQuery): t.BoolQuery {
  if (!isBool(q) && !isClause(q)) {
    return Q.bool(Q.mustNot(q))
  }

  // normalize clause input into a full bool query first
  const b: t.BoolQuery = isClause(q)
    ? Q.bool(q as t.BoolQueryOptions)
    : q as t.BoolQuery

  if (onlyMust(b.query.bool)) {
    return Q.bool(Q.mustNot(...b.query.bool.must))
  } else if (onlyMustNot(b.query.bool)) {
    return Q.bool(Q.must(...b.query.bool.must_not))
  } else {
    return Q.bool(Q.mustNot(b))
  }
}
|
||||
|
||||
// Returns a bool-options fragment setting `minimum_should_match`; combine
// it with other clauses via Q.bool.
Q.minShouldMatch = function minShouldMatch (min: number): t.BoolQueryOptions {
  return { minimum_should_match: min }
}

// Returns a bool-options fragment naming the query (`_name`).
// (Q's read-only `name` property was made writable via Object.defineProperty
// above to allow this assignment.)
Q.name = function name (queryName: string): t.BoolQueryOptions {
  return { _name: queryName }
}
|
||||
|
||||
Q.nested = function nested (path: string, query: any, opts: Record<string, any>): t.QueryBlock {
|
||||
return {
|
||||
query: {
|
||||
nested: {
|
||||
path,
|
||||
...opts,
|
||||
...query
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Q.constantScore = function constantScore (query: any, boost: number): t.QueryBlock {
|
||||
return {
|
||||
query: {
|
||||
constant_score: {
|
||||
...query,
|
||||
boost
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Q.disMax = function disMax (queries: t.AnyQuery[], opts?: Record<string, any>): t.QueryBlock {
|
||||
return {
|
||||
query: {
|
||||
dis_max: {
|
||||
...opts,
|
||||
queries: queries.flat()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Wraps a pre-built `function_score` definition into a query block.
Q.functionScore = function functionScore (function_score: any): t.QueryBlock {
  return { query: { function_score } }
}

// Wraps a pre-built `boosting` definition into a query block.
Q.boosting = function boosting (boostOpts: Record<string, any>): t.QueryBlock {
  return { query: { boosting: boostOpts } }
}
|
||||
|
||||
Q.sort = function sort (key: string | any[], opts?: Record<string, any>): t.Condition {
|
||||
if (Array.isArray(key) === true) {
|
||||
return { sort: key }
|
||||
}
|
||||
return {
|
||||
// @ts-ignore
|
||||
sort: [{ [key]: opts }]
|
||||
}
|
||||
}
|
||||
|
||||
Q.size = function size (s: number): t.Condition {
|
||||
return { size: s }
|
||||
}
|
||||
|
||||
function generateQueryObject (queryType: string, key: string, val: string, opts?: Record<string, any>): t.Condition
|
||||
function generateQueryObject (queryType: string, key: string, val: string[], opts?: Record<string, any>): t.Condition[]
|
||||
function generateQueryObject (queryType: string, key: string, val: any, opts?: Record<string, any>): t.Condition | t.Condition[] {
|
||||
if (Array.isArray(val)) {
|
||||
return val.map(v => generateQueryObject(queryType, key, v, opts))
|
||||
}
|
||||
if (opts === undefined) {
|
||||
return { [queryType]: { [key]: val } }
|
||||
}
|
||||
return {
|
||||
[queryType]: {
|
||||
[key]: {
|
||||
query: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function generateValueObject (queryType: string, key: string, val: string, opts?: Record<string, any>): t.Condition
|
||||
function generateValueObject (queryType: string, key: string, val: string[], opts?: Record<string, any>): t.Condition[]
|
||||
function generateValueObject (queryType: string, key: string, val: any, opts?: Record<string, any>): t.Condition | t.Condition[] {
|
||||
if (Array.isArray(val)) {
|
||||
return val.map(v => generateValueObject(queryType, key, v, opts))
|
||||
}
|
||||
if (opts === undefined) {
|
||||
return { [queryType]: { [key]: val } }
|
||||
}
|
||||
return {
|
||||
[queryType]: {
|
||||
[key]: {
|
||||
value: val,
|
||||
...opts
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isBool (q: any): q is t.BoolQuery {
|
||||
return q.query && q.query.bool
|
||||
}
|
||||
|
||||
function isBoolBlock (q: any): q is t.BoolBlock {
|
||||
return !!q.bool
|
||||
}
|
||||
|
||||
function isClause (q: any): q is t.BoolQueryOptions {
|
||||
if (q.must !== undefined) return true
|
||||
if (q.should !== undefined) return true
|
||||
if (q.must_not !== undefined) return true
|
||||
if (q.filter !== undefined) return true
|
||||
if (q.minimum_should_match !== undefined) return true
|
||||
if (q._name !== undefined) return true
|
||||
return false
|
||||
}
|
||||
|
||||
function onlyShould (bool: t.BoolQueryOptions): bool is t.ShouldClause {
|
||||
if (bool.must !== undefined) return false
|
||||
if (bool.must_not !== undefined) return false
|
||||
if (bool.filter !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
function onlyMust (bool: t.BoolQueryOptions): bool is t.MustClause {
|
||||
if (bool.should !== undefined) return false
|
||||
if (bool.must_not !== undefined) return false
|
||||
if (bool.filter !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
function onlyMustNot (bool: t.BoolQueryOptions): bool is t.MustNotClause {
|
||||
if (bool.should !== undefined) return false
|
||||
if (bool.must !== undefined) return false
|
||||
if (bool.filter !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
function onlyFilter (bool: t.BoolQueryOptions): bool is t.FilterClause {
|
||||
if (bool.should !== undefined) return false
|
||||
if (bool.must !== undefined) return false
|
||||
if (bool.must_not !== undefined) return false
|
||||
if (bool.minimum_should_match !== undefined) return false
|
||||
if (bool._name !== undefined) return false
|
||||
return true
|
||||
}
|
||||
|
||||
// for a given query it always return a bool query:
|
||||
// - if is a bool query returns the query
|
||||
// - if is a clause, wraps the query in a bool block
|
||||
// - if is condition, wraps the query into a must clause and then in a bool block
|
||||
function toMustQuery (query: t.AnyQuery): t.BoolQuery {
|
||||
if (isBool(query)) {
|
||||
return query
|
||||
}
|
||||
|
||||
if (isClause(query)) {
|
||||
return { query: { bool: query } }
|
||||
}
|
||||
|
||||
return { query: { bool: { must: [query] } } }
|
||||
}
|
||||
|
||||
// the aim of this mergeable functions
|
||||
// is to reduce the depth of the query objects
|
||||
function mergeableMust (q: t.AnyQuery): t.AnyQuery | t.AnyQuery[] {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableMust)
|
||||
}
|
||||
if (isBool(q)) {
|
||||
if (onlyMust(q.query.bool)) {
|
||||
return q.query.bool.must
|
||||
} else {
|
||||
return q.query
|
||||
}
|
||||
} else if (isClause(q)) {
|
||||
if (onlyMust(q)) {
|
||||
return q.must
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
function mergeableShould (q: t.AnyQuery): t.AnyQuery | t.AnyQuery[] {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableShould)
|
||||
}
|
||||
if (isBool(q)) {
|
||||
if (onlyShould(q.query.bool)) {
|
||||
return q.query.bool.should
|
||||
} else {
|
||||
return q.query
|
||||
}
|
||||
} else if (isClause(q)) {
|
||||
if (onlyShould(q)) {
|
||||
return q.should
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
function mergeableMustNot (q: t.AnyQuery): t.AnyQuery | t.AnyQuery[] {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableMustNot)
|
||||
}
|
||||
if (isBool(q)) {
|
||||
if (onlyMustNot(q.query.bool)) {
|
||||
return q.query.bool.must_not
|
||||
} else {
|
||||
return q.query
|
||||
}
|
||||
} else if (isClause(q)) {
|
||||
if (onlyMustNot(q)) {
|
||||
return q.must_not
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
function mergeableFilter (q: t.AnyQuery): t.AnyQuery | t.AnyQuery[] {
|
||||
if (Array.isArray(q)) {
|
||||
return q.map(mergeableFilter)
|
||||
}
|
||||
if (isBool(q)) {
|
||||
if (onlyFilter(q.query.bool)) {
|
||||
return q.query.bool.filter
|
||||
} else {
|
||||
return q.query
|
||||
}
|
||||
} else if (isClause(q)) {
|
||||
if (onlyFilter(q)) {
|
||||
return q.filter
|
||||
} else {
|
||||
return { bool: q }
|
||||
}
|
||||
} else {
|
||||
return q
|
||||
}
|
||||
}
|
||||
|
||||
// Expose the query-builder facade as the module's default export.
export default Q
75
dsl/src/types.ts
Normal file
75
dsl/src/types.ts
Normal file
@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
/* eslint no-use-before-define: 0 */

/**
 * A single leaf query condition, e.g. `{ match: { title: 'foo' } }`.
 * Keys and values are left open because any Elasticsearch query DSL
 * object may appear here.
 */
export interface Condition {
  [key: string]: any
}

/** A top-level search body wrapper: `{ query: { ... } }`. */
export interface QueryBlock {
  query: {
    [key: string]: any
  }
}

/** A bool clause carrying only `must` conditions. */
export interface MustClause {
  must: Condition[]
}

/** A bool clause carrying only `must_not` conditions. */
export interface MustNotClause {
  must_not: Condition[]
}

/**
 * A bool clause carrying only `should` conditions, optionally with
 * `minimum_should_match`.
 */
export interface ShouldClause {
  should: Condition[]
  minimum_should_match?: number
}

/** A bool clause carrying only `filter` conditions. */
export interface FilterClause {
  filter: Condition[]
}

/**
 * A complete bool query: `{ query: { bool: { ... } } }`.
 * The options shape can be narrowed via the type parameter.
 */
export interface BoolQuery<TOptions = BoolQueryOptions> {
  query: {
    bool: TOptions
  }
}

/** A bare bool block without the outer `query` wrapper: `{ bool: { ... } }`. */
export interface BoolBlock {
  bool: BoolQueryOptions
}

/** The full set of options accepted inside a `bool` query. */
export interface BoolQueryOptions {
  must?: Condition[] | BoolBlock[]
  must_not?: Condition[] | BoolBlock[]
  should?: Condition[] | BoolBlock[]
  filter?: Condition[] | BoolBlock[]
  minimum_should_match?: number
  _name?: string
}

/** Any shape the query-building helpers accept as input. */
export type AnyQuery = BoolQuery | BoolQueryOptions | Condition | Condition[]

/** An aggregation definition; keys and values are left open. */
export interface Aggregation {
  [key: string]: any
}

/**
 * A compiled template function: maps a typed input to a plain object.
 * NOTE(review): name is lowerCamelCase while every other exported type is
 * PascalCase — consider renaming in a future major version.
 */
export type compiledFunction<TInput> = (input: TInput) => Record<string, any>;
28
dsl/tsconfig.json
Normal file
28
dsl/tsconfig.json
Normal file
@ -0,0 +1,28 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"moduleResolution": "node",
|
||||
"declaration": true,
|
||||
"target": "es2017",
|
||||
"module": "commonjs",
|
||||
"outDir": "lib",
|
||||
"pretty": true,
|
||||
"noEmitOnError": true,
|
||||
"experimentalDecorators": true,
|
||||
"sourceMap": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"strictNullChecks": true,
|
||||
"strictPropertyInitialization": true,
|
||||
"esModuleInterop": true,
|
||||
"removeComments": true,
|
||||
"noUnusedLocals": true,
|
||||
"lib": [
|
||||
"esnext"
|
||||
]
|
||||
},
|
||||
"formatCodeOptions": {
|
||||
"identSize": 2,
|
||||
"tabSize": 2
|
||||
},
|
||||
"exclude": ["examples"],
|
||||
"include": ["./src/*.ts"]
|
||||
}
|
||||
Reference in New Issue
Block a user