Run prettier by nicoabie · Pull Request #540 · join-monster/join-monster · GitHub

Run prettier #540

Open

nicoabie wants to merge 4 commits into master
1 change: 0 additions & 1 deletion .eslintignore
@@ -1 +0,0 @@
test-api
22 changes: 22 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

3 changes: 2 additions & 1 deletion package.json
@@ -24,7 +24,7 @@
"testtsd": "npm run build &a 9E81 mp;& tsd",
"coverage": "nyc --reporter=html npm run test",
"view-coverage": "open coverage/index.html",
"lint": "eslint src test",
"lint": "eslint src test test-api",
"prettier": "prettier --write \"src/**/*.js\" \"test/**/*.js\" \"test-api/**/*.js\"",
"start": "babel-watch ./test-api/server.js",
"docs": "mkdocs serve",
@@ -107,6 +107,7 @@
"mysql": "^2.14.1",
"nyc": "^15.0.1",
"pg": "^8.2.1",
"prettier": "^3.3.3",
"sinon": "^9.0.2",
"sqlite3": "^5.1.0",
"tsd": "^0.28.1"
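This PR doesn't include a Prettier config file, but the reformatted output in the diff (single quotes, no semicolons, trailing commas) implies overrides of two Prettier 3 defaults. A hypothetical sketch of a matching config, not a file from this PR:

// Hypothetical .prettierrc.js — inferred from the diff's style, not part of this PR.
// Prettier 3 already defaults to trailingComma: 'all' and arrowParens: 'always',
// which match the reformatted code; the two overrides below account for the
// single quotes and absent semicolons seen throughout the diff.
module.exports = {
  semi: false,
  singleQuote: true,
}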
12 changes: 7 additions & 5 deletions src/alias-namespace.js
@@ -10,7 +10,7 @@ export default class AliasNamespace {
// a generator for infinite alias names, starting with the shortest possible
// this is helpful for generating the names when minifying
this.mininym = G.baseNAll(
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ#$'
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ#$',
)

// keep track of all the table names we've used since these have to be unique in each query
@@ -43,10 +43,12 @@
return name
}

name = this.aliasPrefix + name
.replace(/\s+/g, '')
.replace(/[^a-zA-Z0-9]/g, '_')
.slice(0, 10)
name =
this.aliasPrefix +
name
.replace(/\s+/g, '')
.replace(/[^a-zA-Z0-9]/g, '_')
.slice(0, 10)
// the table aliases must be unique
// just append a "$" until it's a unique name
while (this.usedTableAliases.has(name)) {
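The change above only re-indents the existing alias sanitization; behavior is unchanged. For context, a standalone sketch of what that code does (function name and signature are mine, not the module's API):

// Illustrative sketch of the alias sanitization shown above, not join-monster's API.
function sanitizeAlias(aliasPrefix, name, usedTableAliases) {
  let alias =
    aliasPrefix +
    name
      .replace(/\s+/g, '') // strip whitespace
      .replace(/[^a-zA-Z0-9]/g, '_') // replace non-alphanumerics with underscores
      .slice(0, 10) // keep the alias short
  // table aliases must be unique within a query; append "$" until this one is
  while (usedTableAliases.has(alias)) {
    alias += '$'
  }
  usedTableAliases.add(alias)
  return alias
}

// e.g. sanitizeAlias('', 'user posts!', new Set()) returns 'userposts_'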
29 changes: 15 additions & 14 deletions src/aliases.js
@@ -23,27 +23,28 @@ const alwaysConflictingTypes = ['table', 'union']
// As it changes the output for custom resolvers, we do our best to only mark nodes as conflicting
// that actually need it to return the correct data.
export function hasConflictingSiblings(node, siblings) {
return !neverConflictingTypes.includes(node.type)
&& siblings.some(sibling => (
sibling !== node
&& sibling.fieldName === node.fieldName
&& sibling.alias !== node.alias
&& !neverConflictingTypes.includes(sibling.type)
&& (
alwaysConflictingTypes.includes(sibling.type)
// Fall back to comparing the args. This is mostly relevant for things like
// sqlExpr, which might use args in the query
|| !isEqual(node.args || {}, sibling.args || {})
)
))
return (
!neverConflictingTypes.includes(node.type) &&
siblings.some(
(sibling) =>
sibling !== node &&
sibling.fieldName === node.fieldName &&
sibling.alias !== node.alias &&
!neverConflictingTypes.includes(sibling.type) &&
(alwaysConflictingTypes.includes(sibling.type) ||
// Fall back to comparing the args. This is mostly relevant for things like
// sqlExpr, which might use args in the query
!isEqual(node.args || {}, sibling.args || {})),
)
)
}

// GraphQL's default resolver supports functions instead of values on source[fieldName],
// and will call this function with the information required so that we can
// return the correct value for the field's alias
export function resolveAliasValue(args, context, info) {
if (!info.fieldNodes || !info.fieldNodes[0]) return null

const alias = info.fieldNodes[0].alias && info.fieldNodes[0].alias.value

// "this" is the source object that contains the aliased field values
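To illustrate what the reformatted conflict check above guards against (my example, not from the PR): two sibling selections of the same table field with different aliases and args must each resolve to their own value, so they are marked as conflicting and stored under per-alias keys.

// Illustrative query where sibling aliases of the same field conflict.
// Both selections share fieldName 'posts' but differ in alias and args,
// so hasConflictingSiblings flags them and resolveAliasValue returns the
// right value for each alias at resolve time.
const query = `{
  user(id: 1) {
    recent: posts(first: 1) { title }
    older: posts(first: 10) { title }
  }
}`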
27 changes: 12 additions & 15 deletions src/array-to-connection.js
@@ -34,13 +34,13 @@ function arrToConnection(data, sqlAST) {
}
const pageInfo = {
hasNextPage: false,
hasPreviousPage: false
hasPreviousPage: false,
}
if (!data) {
if (sqlAST.paginate) {
return {
pageInfo,
edges: []
edges: [],
}
}
return null
@@ -50,8 +50,8 @@ function arrToConnection(data, sqlAST) {
// we must prevent the recursive processing from visiting the same object twice, because mutating the object the first
// time changes it everywhere. we'll set the `_paginated` property to true to prevent this
if (sqlAST.paginate && !data._paginated) {
if (sqlAST.sortKey || idx(sqlAST, _ => _.junction.sortKey)) {
if (idx(sqlAST, _ => _.args.first)) {
if (sqlAST.sortKey || idx(sqlAST, (_) => _.junction.sortKey)) {
if (idx(sqlAST, (_) => _.args.first)) {
// we fetched an extra one in order to determine if there is a next page; if there is one, pop off that extra
if (data.length > sqlAST.args.first) {
pageInfo.hasNextPage = true
@@ -64,7 +64,7 @@
data.pop()
}
data.reverse()
} else if (idx(sqlAST, _ => _.defaultPageSize)) {
} else if (idx(sqlAST, (_) => _.defaultPageSize)) {
// we fetched an extra one in order to determine if there is a next page; if there is one, pop off that extra
if (data.length > sqlAST.defaultPageSize) {
pageInfo.hasNextPage = true
@@ -75,7 +75,7 @@
// convert nodes to edges and compute the cursor for each
// TODO: only compute the cursors if they are asked for
const sortKey = sqlAST.sortKey || sqlAST.junction.sortKey
const edges = data.map(obj => {
const edges = data.map((obj) => {
const cursor = {}
for (let key of sortKey) {
cursor[key.column] = obj[key.column]
@@ -90,21 +90,21 @@
}
if (sqlAST.orderBy || (sqlAST.junction && sqlAST.junction.orderBy)) {
let offset = 0
if (idx(sqlAST, _ => _.args.after)) {
if (idx(sqlAST, (_) => _.args.after)) {
offset = cursorToOffset(sqlAST.args.after) + 1
}
// $total was a special column for determining the total number of items
const arrayLength = data[0] && parseInt(data[0].$total, 10)
let defaultArgs = sqlAST.args
if (
idx(sqlAST, _ => _.defaultPageSize) &&
!idx(defaultArgs, _ => _.first)
idx(sqlAST, (_) => _.defaultPageSize) &&
!idx(defaultArgs, (_) => _.first)
) {
defaultArgs.first = sqlAST.defaultPageSize
}
const connection = connectionFromArraySlice(data, defaultArgs, {
sliceStart: offset,
arrayLength
arrayLength,
})
connection.total = arrayLength || 0
connection._paginated = true
@@ -119,17 +119,14 @@ export default arrToConnection
function recurseOnObjInData(dataObj, astChild) {
const aliasKey = getAliasKey(astChild.fieldName, astChild.alias)
if (dataObj[aliasKey]) {
dataObj[aliasKey] = arrToConnection(
dataObj[aliasKey],
astChild
)
dataObj[aliasKey] = arrToConnection(dataObj[aliasKey], astChild)
}

const dataChild = dataObj[astChild.fieldName]
if (dataChild && typeof dataChild !== 'function') {
dataObj[astChild.fieldName] = arrToConnection(
dataObj[astChild.fieldName],
astChild
astChild,
)
}
}
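The "fetch one extra row" trick described in the comments above is easier to see in isolation. A minimal sketch, under the assumption that a plain array stands in for the paginated query result (names are mine):

// Minimal sketch of forward pagination's "ask for first + 1 rows" trick,
// illustrative only — join-monster does this inside arrToConnection.
function paginateForward(rows, first) {
  const pageInfo = { hasNextPage: false, hasPreviousPage: false }
  // the query requested first + 1 rows; a surplus row proves another page exists
  if (rows.length > first) {
    pageInfo.hasNextPage = true
    rows = rows.slice(0, first)
  }
  return { pageInfo, edges: rows.map((node) => ({ node })) }
}

// paginateForward(['a', 'b', 'c'], 2) -> hasNextPage: true, edges for 'a' and 'b'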
75 changes: 41 additions & 34 deletions src/batch-planner/index.js
@@ -2,16 +2,17 @@ import { uniq, chain, map, groupBy, forIn } from 'lodash'
import arrToConnection from '../array-to-connection'
import { handleUserDbCall, maybeQuote, wrap, compileSqlAST } from '../util'
import idx from 'idx'
import { getAliasKey, hasConflictingSiblings, resolveAliasValue } from '../aliases'
import {
getAliasKey,
hasConflictingSiblings,
resolveAliasValue,
} from '../aliases'

async function nextBatch(sqlAST, data, dbCall, context, options) {
// paginated fields are wrapped in connections. strip those off for the batching
if (sqlAST.paginate) {
if (Array.isArray(data)) {
data = chain(data)
.flatMap('edges')
.map('node')
.value()
data = chain(data).flatMap('edges').map('node').value()
} else {
data = map(data.edges, 'node')
}
@@ -21,55 +21,64 @@ async function nextBatch(sqlAST, data, dbCall, context, options) {
}

const children = sqlAST.children
Object.values(sqlAST.typedChildren || {}).forEach(typedChildren =>
children.push(...typedChildren)
Object.values(sqlAST.typedChildren || {}).forEach((typedChildren) =>
children.push(...typedChildren),
)

// loop through all the child fields that are tables
return Promise.all(
children.map(childAST =>
nextBatchChild(childAST, data, dbCall, context, options, children)
)
children.map((childAST) =>
nextBatchChild(childAST, data, dbCall, context, options, children),
),
)
}

// processes a single child of the batch
async function nextBatchChild(childAST, data, dbCall, context, options, siblings) {
async function nextBatchChild(
childAST,
data,
dbCall,
context,
options,
siblings,
) {
if (childAST.type !== 'table' && childAST.type !== 'union') return

const isConflicting = hasConflictingSiblings(childAST, siblings)
const fieldName = childAST.fieldName
const valueKey = isConflicting ? getAliasKey(fieldName, childAST.alias) : fieldName
const valueKey = isConflicting
? getAliasKey(fieldName, childAST.alias)
: fieldName

// see if any begin a new batch
if (childAST.sqlBatch || idx(childAST, _ => _.junction.sqlBatch)) {
if (childAST.sqlBatch || idx(childAST, (_) => _.junction.sqlBatch)) {
let thisKey
let parentKey
if (childAST.sqlBatch) {
// if so, we know we'll need to get the key for matching with the parent key
childAST.children.push(childAST.sqlBatch.thisKey)
thisKey = childAST.sqlBatch.thisKey.fieldName
parentKey = childAST.sqlBatch.parentKey.fieldName
} else if (idx(childAST, _ => _.junction.sqlBatch)) {
} else if (idx(childAST, (_) => _.junction.sqlBatch)) {
childAST.children.push(childAST.junction.sqlBatch.thisKey)
thisKey = childAST.junction.sqlBatch.thisKey.fieldName
parentKey = childAST.junction.sqlBatch.parentKey.fieldName
}

if (Array.isArray(data)) {
// the "batch scope" is the set of values to match this key against from the previous batch
const batchScope = uniq(data.map(obj => maybeQuote(obj[parentKey])))
const batchScope = uniq(data.map((obj) => maybeQuote(obj[parentKey])))
// generate the SQL, with the batch scope values incorporated in a WHERE IN clause
const { sql, shapeDefinition } = await compileSqlAST(childAST, context, {
...options,
batchScope
batchScope,
})
// grab the data
let newData = await handleUserDbCall(
dbCall,
sql,
childAST,
wrap(shapeDefinition)
wrap(shapeDefinition),
)
// group the rows by the key so we can match them with the previous batch
newData = groupBy(newData, thisKey)
@@ -92,8 +102,8 @@ async function nextBatchChild(childAST, data, dbCall, context, options, siblings
startCursor: null,
endCursor: null,
hasNextPage: false,
hasPreviousPage: false
}
hasPreviousPage: false,
},
}
: [])
if (isConflicting) obj[fieldName] = resolveAliasValue
@@ -104,11 +114,8 @@ async function nextBatchChild(childAST, data, dbCall, context, options, siblings
const ob = newData[obj[parentKey]]
if (ob) {
obj[valueKey] = Object.assign(
obj[valueKey] ?? {},
arrToConnection(
newData[obj[parentKey]][0],
childAST
)
obj[valueKey] ?? {},
arrToConnection(newData[obj[parentKey]][0], childAST),
)
matchedData.push(obj)
} else {
@@ -121,22 +128,22 @@ async function nextBatchChild(childAST, data, dbCall, context, options, siblings

// move down a level and recurse
const nextLevelData = chain(data)
.filter(obj => obj != null)
.flatMap(obj => obj[valueKey])
.filter(obj => obj != null)
.filter((obj) => obj != null)
.flatMap((obj) => obj[valueKey])
.filter((obj) => obj != null)
.value()
return nextBatch(childAST, nextLevelData, dbCall, context, options)
}
const batchScope = [maybeQuote(data[parentKey])]
const { sql, shapeDefinition } = await compileSqlAST(childAST, context, {
...options,
batchScope
batchScope,
})
let newData = await handleUserDbCall(
dbCall,
sql,
childAST,
wrap(shapeDefinition)
wrap(shapeDefinition),
)
newData = groupBy(newData, thisKey)
if (childAST.paginate) {
Expand All @@ -148,19 +155,19 @@ async function nextBatchChild(childAST, data, dbCall, context, options, siblings
const targets = newData[data[parentKey]] || []
data[valueKey] = targets[0]
}

if (isConflicting) data[fieldName] = resolveAliasValue

if (data) {
return nextBatch(childAST, data[valueKey], dbCall, context, options)
}

// otherwise, just bypass this and recurse down to the next level
} else if (Array.isArray(data)) {
const nextLevelData = chain(data)
.filter(obj => obj != null)
.flatMap(obj => obj[valueKey])
.filter(obj => obj != null)
.filter((obj) => obj != null)
.flatMap((obj) => obj[valueKey])
.filter((obj) => obj != null)
.value()
return nextBatch(childAST, nextLevelData, dbCall, context, options)
} else if (data) {
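The batching pattern in this file — collect parent keys, run one query with a WHERE IN clause, then group the rows back onto their parents — replaces N+1 child queries with one query per level. A minimal sketch of that matching step (function and parameter names are mine, not the planner's):

import { uniq, groupBy } from 'lodash'

// Illustrative sketch of the batch-matching step, not the real batch planner.
async function batchChildren(parents, parentKey, thisKey, runQuery) {
  // the "batch scope": every distinct parent key value from the previous batch
  const batchScope = uniq(parents.map((p) => p[parentKey]))
  // one query fetches children for all parents, e.g. WHERE thisKey IN (batchScope)
  const rows = await runQuery(batchScope)
  // group rows by the matching key so each parent can claim its children
  const rowsByKey = groupBy(rows, thisKey)
  for (const parent of parents) {
    parent.children = rowsByKey[parent[parentKey]] || []
  }
  return parents
}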