Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into k8s
Browse files Browse the repository at this point in the history
  • Loading branch information
sylvain-morin committed Feb 9, 2024
2 parents 08076ac + a0da003 commit 4e6b97c
Show file tree
Hide file tree
Showing 85 changed files with 5,123 additions and 1,837 deletions.
17 changes: 16 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
FROM node:16
# Platform should be forced to amd64
# because node-mapnik is not available in arm64
FROM --platform=linux/amd64 node:16 as base

ENV NODE_ENV=development

Expand All @@ -15,6 +17,18 @@ COPY --chown=inaturalist:inaturalist config.docker.js config.js
# Install dependencies
RUN npm install

FROM base as test

ENV NODE_ENV=test

RUN apt-get update -qq && apt-get install -y postgresql-client-11

COPY . .

CMD [ "npm", "run", "coverage" ]

FROM base as development

# Copy app and libs
COPY --chown=inaturalist:inaturalist lib lib
COPY --chown=inaturalist:inaturalist openapi openapi
Expand All @@ -28,6 +42,7 @@ RUN mkdir /home/inaturalist/api/log
RUN mkdir /home/inaturalist/api/cache
RUN mkdir -p /home/inaturalist/api/public/uploads


EXPOSE 4000

CMD [ "node", "app.js" ]
28 changes: 26 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@ Our API is documented using the [Swagger](http://swagger.io/)/[OpenAPI](https://
npm install
# Fill in vals to connect to Rails, Postgres, and elasticsearch
cp config_example.js config.js
# Run the node app on port 4000
node app.js
# Run the node app on port 4000. NODE_ENV is required, so you'll need to set
# it here or elsewhere in your environment
NODE_ENV=development node app.js
```

# Running Tests
Expand All @@ -25,6 +26,29 @@ Filter by pattern: `NODE_ENV=test ./node_modules/mocha/bin/_mocha --recursive --

You can also add `.only` to a `describe` or `it` call to only run that test when you run `npm test`, e.g. `it.only( "should only run this test" )`.

# Running Tests with Docker

You can run the tests with Docker Compose.
All required services will be started by the `docker-compose.test.yml` compose file:

```
docker compose -f docker-compose.test.yml up -d
```

You can follow the test execution in the logs:

```
docker logs -f api-test
```

The first time you run the compose file, a local Docker image for the API service will be built automatically from your local Git checkout. If you later make code changes, or update your Git checkout, you need to rebuild the Docker image with:

```
docker compose -f docker-compose.test.yml build
```

This Compose build uses the `test` target of the Dockerfile.

# ESLint

Please run ESLint to check for syntax formatting errors. To run ESLint, run: `npm run eslint`. Please address any syntax errors before submitting pull requests. ESLint will also run automatically via Github Actions on submitted pull requests along with tests.
Expand Down
3 changes: 1 addition & 2 deletions config.js.ci
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,7 @@ module.exports = {
user: "postgres",
host: "127.0.0.1",
port: 5432,
geometry_field: "geom",
dbname: "inaturalist_test"
geometry_field: "geom"
},
websiteURL: "http://localhost:3000/",
staticImagePrefix: "http://localhost:3000/attachments/",
Expand Down
3 changes: 3 additions & 0 deletions config_example.js
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ module.exports = {
host: INAT_ES_HOST ? `http://${INAT_ES_HOST}:9200` : "http://localhost:9200",
geoPointField: "location"
},
// Note that the database name will be inferred from the NODE_ENV
// environment variable, e.g. `inaturalist_${process.env.NODE_ENV}`, or it
// can be set explicitly with process.env.INAT_DB_NAME
database: {
user: INAT_DB_USER || "inaturalist",
host: INAT_DB_HOST || "127.0.0.1",
Expand Down
70 changes: 70 additions & 0 deletions docker-compose.test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
# Compose file for running the test suite (see "Running Tests with Docker"
# in the README). Starts all backing services plus an api-test container
# built from the `test` target of the Dockerfile, which runs `npm run coverage`.
version: "2"
services:

  redis:
    container_name: redis
    image: redis:6.0.3
    ports:
      - 6379:6379
    volumes:
      - redis_data_test:/data

  memcached:
    container_name: memcached
    image: memcached:1.6.6
    ports:
      - 11211:11211

  pg:
    container_name: pg
    image: postgis/postgis:12-3.0
    environment:
      POSTGRES_USER: 'inaturalist'
      POSTGRES_PASSWORD: 'inaturalist'
      POSTGRES_DB: inaturalist_test
    ports:
      - 5432:5432
    volumes:
      - pg_data_test:/var/lib/postgresql/data
      # Schema is only loaded on the first start of a fresh pg_data_test volume
      - ./schema/database.sql:/docker-entrypoint-initdb.d/database.sql

  es:
    container_name: es
    image: docker.elastic.co/elasticsearch/elasticsearch:8.9.1
    environment:
      - discovery.type=single-node
      - xpack.security.enabled=false
    ports:
      - 9200:9200
    volumes:
      - es_data_test:/usr/share/elasticsearch/data
    # Install the Japanese (kuromoji) analysis plugin if it is missing,
    # then start Elasticsearch via its normal entrypoint
    command: >
      /bin/sh -c "bin/elasticsearch-plugin list | grep -q analysis-kuromoji
      || bin/elasticsearch-plugin install analysis-kuromoji;
      /usr/local/bin/docker-entrypoint.sh"

  api-test:
    container_name: api-test
    build:
      context: .
      dockerfile: Dockerfile
      target: test
    environment:
      NODE_ENV: test
      INAT_DB_HOST: pg
      INAT_DB_USER: 'inaturalist'
      INAT_DB_PASS: 'inaturalist'
      INAT_ES_HOST: es
      INAT_REDIS_HOST: redis
      # Lets the container reach a Rails server running on the Docker host
      INAT_WEB_HOST: host.docker.internal
      INAT_DB_NAME: inaturalist_test
      INAT_ES_INDEX_PREFIX: test
    ports:
      - 4000:4000
    extra_hosts:
      - "host.docker.internal:host-gateway"

volumes:
  redis_data_test:
  pg_data_test:
  es_data_test:
14 changes: 13 additions & 1 deletion lib/controllers/v1/announcements_controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@ const { announcements } = require( "inaturalistjs" );
const InaturalistAPI = require( "../../inaturalist_api" );
const pgClient = require( "../../pg_client" );
const Site = require( "../../models/site" );
const util = require( "../../util" );

const AnnouncementsController = class AnnouncementsController {
static async search( req ) {
let query = squel.select( )
.field( "announcements.id, body, placement, dismissible, locales, start, \"end\"" )
.field( "announcements.id, body, placement, dismissible, locales, clients, start, \"end\"" )
.from( "announcements" )
.where( "NOW() at time zone 'utc' between start and \"end\"" )
.order( "announcements.id" );
Expand All @@ -29,6 +30,17 @@ const AnnouncementsController = class AnnouncementsController {
query = query.where( placementClause );
}

const userAgentClient = util.userAgentClient( req );
if ( req.query.client || userAgentClient ) {
// given a client parameter, return only announcements that include that client,
// or announcements with no client specified
query = query.where( "? = ANY( clients ) OR clients IS NULL OR clients = '{}'",
req.query.client || userAgentClient );
} else {
// if there is no client parameter, return only announcements with no client specified
query = query.where( "clients IS NULL OR clients = '{}'" );
}

// site_id filter
if ( req.userSession ) {
// authenticated requests include announcements targeted at the users site,
Expand Down
13 changes: 7 additions & 6 deletions lib/controllers/v1/identifications_controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -292,21 +292,22 @@ const IdentificationsController = class IdentificationsController {
static async identifiers( req ) {
const countQuery = _.assignIn( { }, req.query );
const paginationOptions = { default: 500, max: 500 };
const { aggSize } = InaturalistAPI.paginationData( req, paginationOptions );
const { offset, aggSize } = InaturalistAPI.paginationData( req, paginationOptions );
countQuery.aggs = { };
if ( !req.query.no_total_hits ) {
countQuery.aggs.total = {
cardinality: { field: "user.id", precision_threshold: 5000 }
};
}
if ( aggSize > 0 ) {
if ( offset < 500 && aggSize > 0 ) {
countQuery.aggs.users = {
terms: { field: "user.id", size: paginationOptions.perPage }
terms: { field: "user.id", size: aggSize }
};
}
return ESModel.userAggregationQuery(
req, countQuery, IdentificationsController.elasticResults, { }
);
return ESModel.userAggregationQuery( req,
countQuery,
IdentificationsController.elasticResults,
{ paginationOptions } );
}

static async observers( req ) {
Expand Down
13 changes: 7 additions & 6 deletions lib/controllers/v1/observations_controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -235,6 +235,7 @@ ObservationsController.elasticResults = async function observationElasticResults
&& ( _.isEmpty( query.inverse_filters ) || excludingOnlyUserIDs )
&& _.isEmpty( query.grouped_inverse_filters )
&& _.isEqual( query.sort, { created_at: "desc" } )
&& _.isEmpty( req.query.aggs )
) {
const sourceParams = util.sourceParams( returnOnlyID ? ["id"] : {
includes: opts.includes,
Expand Down Expand Up @@ -405,7 +406,7 @@ ObservationsController.taxa = async req => {
taxon_ids: {
terms: {
field: "taxon.id",
size: 500000
size: 600000
}
}
}
Expand Down Expand Up @@ -875,7 +876,7 @@ ObservationsController.taxonomy = async req => {
min_species_taxon_ids: {
terms: {
field: "taxon.id",
size: 500000
size: 600000
}
}
}
Expand Down Expand Up @@ -1238,8 +1239,8 @@ ObservationsController.observationsObserverCounts = async req => {
};
// attempting to account for inaccurate counts for queries with a small size
// see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html#search-aggregations-bucket-terms-aggregation-shard-size
if ( ( ( aggSize * 1.5 ) + 10 ) < 200 ) {
countQuery.aggs.top_observers.terms.shard_size = 200;
if ( ( ( aggSize * 1.5 ) + 10 ) < 500 ) {
countQuery.aggs.top_observers.terms.shard_size = 500;
}
}
countQuery.per_page = 0;
Expand Down Expand Up @@ -1289,8 +1290,8 @@ ObservationsController.observationsSpeciesObserverCounts = async req => {
};
// attempting to account for inaccurate counts for queries with a small size
// see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html#search-aggregations-bucket-terms-aggregation-shard-size
if ( ( ( aggSize * 1.5 ) + 10 ) < 200 ) {
countQuery.aggs.user_taxa.terms.shard_size = 200;
if ( ( ( aggSize * 1.5 ) + 10 ) < 500 ) {
countQuery.aggs.user_taxa.terms.shard_size = 500;
}
if ( aggSize > 0 ) {
countQuery.aggs.user_taxa.aggs = {
Expand Down
8 changes: 4 additions & 4 deletions lib/controllers/v1/projects_controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ const ProjectsController = class ProjectsController {

static async show( req ) {
InaturalistAPI.setPerPage( req, { max: 100 } );
const ids = _.filter( req.params.id.split( "," ), _.identity );
const ids = _.filter( req.params.id.toString( ).split( "," ), _.identity );
if ( ids.length > req.query.per_page ) {
throw util.httpError( 422, "Too many IDs" );
}
Expand Down Expand Up @@ -290,7 +290,7 @@ const ProjectsController = class ProjectsController {
null,
"prup.owner_id = pu.id AND prup.owner_type = 'ProjectUser' AND prup.name = 'updates'"
)
.where( "p.id IN (?)", ids )
.where( "p.id IN ?", ids )
.where( "pu.user_id = ?", req.userSession.user_id );
const { rows } = await pgClient.query( query.toString( ) );
_.each( rows, row => {
Expand Down Expand Up @@ -344,13 +344,13 @@ const ProjectsController = class ProjectsController {

static async posts( req ) {
const { page, perPage } = InaturalistAPI.paginationData( req, { default: 10, max: 30 } );
const ids = _.filter( req.params.id.split( "," ), _.identity );
const ids = _.filter( req.params.id.toString( ).split( "," ), _.identity );
let numericIDs = _.filter( ids, id => Number( id ) );
if ( _.isEmpty( numericIDs ) ) { numericIDs = [-1]; }
const query = squel.select( ).field( "posts.*, count(*) OVER() AS total_count" )
.from( "posts" )
.join( "projects", null, "posts.parent_id = projects.id AND parent_type='Project'" )
.where( "projects.id IN (?) OR projects.slug IN (?)", numericIDs, ids )
.where( "projects.id IN ? OR projects.slug IN ?", numericIDs, ids )
.where( "posts.published_at IS NOT NULL" )
.order( "posts.published_at", false )
.limit( perPage )
Expand Down
3 changes: 3 additions & 0 deletions lib/controllers/v1/taxa_controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ TaxaController.show = async req => {
throw util.httpError( 422, "Too many IDs" );
}
const filters = [{ terms: { id: ids } }];
if ( req.query.rank_level ) {
filters.push( esClient.termFilter( "rank_level", req.query.rank_level ) );
}
return TaxaController.searchQuery( req, {
filters,
details: true,
Expand Down
32 changes: 32 additions & 0 deletions lib/controllers/v1/users_controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -384,6 +384,38 @@ const UsersController = class UsersController {
// Re-sends the account confirmation email for the current user by
// delegating to the inaturalistjs `users.resendConfirmation` endpoint
// wrapped in iNatJSWrap (which forwards auth and request context).
static async resendConfirmation( req ) {
return InaturalistAPI.iNatJSWrap( users.resendConfirmation, req );
}

static async recentObservationFields( req ) {
if ( !req.userSession ) { throw new Error( 401 ); }
const query = squel.select( { autoQuoteAliasNames: false } )
.field( "id" )
.field( "name" )
.field( "description" )
.field( "datatype" )
.field( "allowed_values" )
.field( "values_count" )
.field( "ofvs.ofv_max_id" )
.from( "observation_fields" )
.join(
squel.select( )
.field( "observation_field_id" )
.field( "max(observation_field_values.id) as ofv_max_id" )
.from( "observation_field_values" )
.where( "user_id = ?", req.userSession.user_id )
.group( "observation_field_id" ), "ofvs", "observation_fields.id = ofvs.observation_field_id"
)
.order( "ofvs.ofv_max_id", false )
.limit( 10 );
const { rows } = await pgClient.query( query.toString( ) );
return {
total_results: rows.length,
page: 1,
per_page: rows.length,
results: _.map( rows, r => _.pick( r, [
"id", "name", "description", "datatype", "allowed_values", "values_count"
] ) )
};
}
};

module.exports = UsersController;
18 changes: 0 additions & 18 deletions lib/controllers/v2/projects_controller.js

This file was deleted.

Loading

0 comments on commit 4e6b97c

Please sign in to comment.