Merge pull request #10 from JurunenseDesenvolvimento/homologacao

Homologacao
This commit is contained in:
Joelson
2025-11-14 17:39:04 -03:00
committed by GitHub
146 changed files with 7073 additions and 3896 deletions

32
.dockerignore Normal file
View File

@@ -0,0 +1,32 @@
# Files excluded from the Docker build context (keeps images small and
# prevents secrets such as .env files from leaking into layers).

# Dependencies and build output (reinstalled/rebuilt inside the image)
node_modules
npm-debug.log
dist

# VCS metadata
.git
.gitignore

# Environment files — never bake secrets into the image;
# the example file is explicitly re-included as a template.
.env
.env.*
!.env.example

# Test and coverage artifacts
coverage
.nyc_output

# Logs and documentation
*.log
*.md

# Editor/IDE and OS junk
.vscode
.idea
*.swp
*.swo
*~
.DS_Store

# Test sources and tooling config not needed at runtime
test
*.spec.ts
*.spec.js
__tests__
jest.config.js
jest.setup.js
.eslintrc.js
eslint.config.js
.prettierrc
.prettierignore

# Local-only directories
monitoring
docs

254
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,254 @@
# CI/CD pipeline: lint -> build/test -> (e2e, docker image) -> release.
# NOTE: the diff rendering had stripped YAML indentation; structure restored here.
name: CI

on:
  push:
    branches: [main, master, develop, homologacao]
    tags:
      - 'v*'
  pull_request:
    branches: [main, master, develop, homologacao]

jobs:
  lint:
    name: Lint and Format Check
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20.x'
          cache: 'npm'
      - name: Install dependencies
        run: npm ci
      - name: Run ESLint
        run: npx eslint "src/**/*.ts" --max-warnings 0
        # NOTE(review): continue-on-error makes this check advisory only —
        # remove once the codebase is lint-clean, or the gate is meaningless.
        continue-on-error: true
      - name: Run Prettier check
        run: npx prettier --check "src/**/*.ts" "test/**/*.ts"
        continue-on-error: true

  build:
    name: Build
    runs-on: ubuntu-latest
    needs: lint
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20.x'
          cache: 'npm'
      - name: Install dependencies
        run: npm ci
      - name: Build project
        run: npm run build
      - name: Upload build artifacts
        uses: actions/upload-artifact@v4
        with:
          name: dist
          path: dist/
          retention-days: 1

  test:
    name: Unit Tests
    runs-on: ubuntu-latest
    needs: lint
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20.x'
          cache: 'npm'
      - name: Install dependencies
        run: npm ci
      - name: Run unit tests
        run: npm run test
      - name: Generate coverage report
        run: npm run test:cov
        continue-on-error: true
      - name: Upload coverage reports
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: coverage
          path: coverage/
          retention-days: 7

  test-e2e:
    name: E2E Tests
    runs-on: ubuntu-latest
    needs: build
    # Run only for PRs and pushes to the default branches.
    if: github.event_name == 'pull_request' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/master'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20.x'
          cache: 'npm'
      - name: Install dependencies
        run: npm ci
      - name: Build project
        run: npm run build
      - name: Run e2e tests
        run: npm run test:e2e
        env:
          NODE_ENV: test
        # NOTE(review): advisory only — e2e failures will not fail the pipeline.
        continue-on-error: true

  docker-build:
    name: Docker Build
    runs-on: ubuntu-latest
    needs: build
    if: github.event_name == 'push'
    permissions:
      contents: read
      packages: write
    env:
      REGISTRY: ghcr.io
      IMAGE_NAME: ${{ github.repository }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Get package version
        id: package-version
        run: |
          VERSION=$(node -p "require('./package.json').version")
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "Package version: $VERSION"
      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=sha,prefix={{branch}}-
            type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.ref_type == 'branch' }}
            type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }}
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  release:
    name: Create Release
    runs-on: ubuntu-latest
    needs: [build, docker-build]
    if: startsWith(github.ref, 'refs/tags/v') && github.ref_type == 'tag'
    permissions:
      contents: write
    env:
      REGISTRY: ghcr.io
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          fetch-tags: true
      - name: Check if tag is on main branch
        id: check-branch
        run: |
          git fetch origin main:main 2>/dev/null || true
          if git branch -r --contains ${{ github.ref_name }} | grep -q 'origin/main'; then
            echo "on_main=true" >> $GITHUB_OUTPUT
            echo "Tag is on main branch"
          else
            echo "on_main=false" >> $GITHUB_OUTPUT
            echo "Tag is not on main branch, skipping release"
          fi
      - name: Get version from tag
        id: version
        run: |
          TAG_VERSION=${GITHUB_REF#refs/tags/v}
          echo "version=$TAG_VERSION" >> $GITHUB_OUTPUT
          echo "Tag version: $TAG_VERSION"
      - name: Generate changelog
        id: changelog
        run: |
          PREVIOUS_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
          if [ -z "$PREVIOUS_TAG" ]; then
            CHANGELOG=$(git log --pretty=format:"- %s (%h)" HEAD)
          else
            CHANGELOG=$(git log --pretty=format:"- %s (%h)" ${PREVIOUS_TAG}..HEAD)
          fi
          echo "changelog<<EOF" >> $GITHUB_OUTPUT
          echo "$CHANGELOG" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
      - name: Create Release
        if: steps.check-branch.outputs.on_main == 'true'
        uses: softprops/action-gh-release@v1
        with:
          tag_name: ${{ github.ref_name }}
          name: Release ${{ steps.version.outputs.version }}
          body: |
            ## Versão ${{ steps.version.outputs.version }}
            ### Mudanças
            ${{ steps.changelog.outputs.changelog }}
            ### Docker Image
            ```bash
            docker pull ${{ env.REGISTRY }}/${{ github.repository }}:${{ steps.version.outputs.version }}
            docker pull ${{ env.REGISTRY }}/${{ github.repository }}:latest
            ```
          draft: false
          prerelease: false

61
Dockerfile Normal file
View File

@@ -0,0 +1,61 @@
# ---- base: Oracle Linux 9 + Node.js 20 + Oracle client prerequisites ----
FROM oraclelinux:9 AS base
WORKDIR /app
RUN dnf install -y \
        curl \
        libaio \
    && curl -fsSL https://rpm.nodesource.com/setup_20.x | bash - \
    && dnf install -y nodejs \
    && dnf clean all

# ---- dependencies: production node_modules only ----
FROM base AS dependencies
COPY package*.json ./
# FIX: `--only=production` is deprecated in npm >= 8; `--omit=dev` is the
# supported equivalent with identical behavior.
RUN npm ci --omit=dev && npm cache clean --force

# ---- build: full install + compile ----
FROM base AS build
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build

# ---- production: minimal runtime image ----
FROM base AS production
# NOTE: curl and libaio are already installed in the `base` stage this image
# inherits from, so the redundant re-install was dropped.
ENV ORACLE_CLIENT_LIB_DIR=/usr/lib/oracle/21/client64/lib
# Oracle Instant Client (basiclite) for the node oracledb thick client.
# The otn_software URL needs no login cookies, so no cookie jar is required.
RUN curl -fSL --retry 3 \
        "https://download.oracle.com/otn_software/linux/instantclient/2112000/el9/oracle-instantclient-basiclite-21.12.0.0.0-1.el9.x86_64.rpm" \
        --output /tmp/oracle-instantclient-basiclite.rpm && \
    dnf install -y /tmp/oracle-instantclient-basiclite.rpm && \
    rm -f /tmp/oracle-instantclient-basiclite.rpm && \
    dnf clean all
ENV LD_LIBRARY_PATH=/usr/lib/oracle/21/client64/lib
ENV TZ=America/Sao_Paulo
# Run the app as an unprivileged user.
RUN groupadd -r node && useradd -r -g node node
COPY --from=dependencies /app/node_modules ./node_modules
COPY --from=build /app/dist ./dist
COPY --from=build /app/package*.json ./
COPY --from=build /app/cert ./cert
RUN chown -R node:node /app
EXPOSE 8066
USER node
CMD ["node", "dist/main"]

127
docs/VERSIONAMENTO.md Normal file
View File

@@ -0,0 +1,127 @@
# Sistema de Versionamento
Este documento descreve o sistema de versionamento utilizado no projeto, incluindo como as imagens Docker são versionadas e como criar releases no GitHub.
## Visão Geral
O projeto utiliza versionamento semântico baseado no arquivo `package.json` e tags Git para gerenciar versões de imagens Docker e releases no GitHub Container Registry (GHCR).
## Versionamento de Imagens Docker
As imagens Docker são automaticamente versionadas durante o processo de CI/CD baseado no contexto do push:
### Branches
- **Branch `homologacao`**: `ghcr.io/usuario/repo:homologacao`
- **Branch `main`**:
- `ghcr.io/usuario/repo:main`
- `ghcr.io/usuario/repo:latest`
- `ghcr.io/usuario/repo:{versao-do-package.json}`
### Tags Git
Quando uma tag é criada (formato `v*`, exemplo: `v0.1.0`), as seguintes tags são geradas:
- `ghcr.io/usuario/repo:0.1.0` (versão completa)
- `ghcr.io/usuario/repo:0.1` (major.minor)
- `ghcr.io/usuario/repo:0` (major)
### Commits
Cada commit gera uma tag com o SHA do commit:
- `ghcr.io/usuario/repo:{branch}-{sha}`
## Releases no GitHub
Releases são criadas automaticamente quando uma tag Git no formato `v*` é enviada para a branch `main`. O processo inclui:
1. Verificação se a tag está na branch `main`
2. Geração automática de changelog baseado nos commits desde a última tag
3. Criação da release no GitHub com:
- Nome da versão
- Changelog completo
- Instruções para download da imagem Docker
## Como Criar uma Nova Versão
### Para Homologação
1. Atualize a versão no `package.json`:
```bash
npm version patch # Incrementa patch: 0.0.1 -> 0.0.2
npm version minor # Incrementa minor: 0.0.1 -> 0.1.0
npm version major # Incrementa major: 0.0.1 -> 1.0.0
```
2. Faça commit e push:
```bash
git add package.json package-lock.json
git commit -m "chore: bump version to 0.1.0"
git push origin homologacao
```
A imagem Docker será automaticamente buildada e publicada com a tag correspondente à branch.
### Para Produção (Release)
1. Faça merge da branch `homologacao` para `main`:
```bash
git checkout main
git merge homologacao
```
2. Crie uma tag e faça push:
```bash
git tag v0.1.0
git push origin main --tags
```
Este processo irá:
- Buildar a imagem Docker com as tags de versão semântica
- Criar automaticamente uma release no GitHub
- Publicar a imagem no GitHub Container Registry
## Verificar Versão Atual
A versão atual do projeto está definida no arquivo `package.json`:
```json
{
"version": "0.0.1"
}
```
## Estrutura de Tags Docker
As imagens Docker seguem o seguinte padrão de nomenclatura:
- **Desenvolvimento**: `{registry}/{repo}:{branch-name}`
- **Versão específica**: `{registry}/{repo}:{version}` (ex: `0.1.0`)
- **Versão parcial**: `{registry}/{repo}:{major}.{minor}` (ex: `0.1`)
- **Major version**: `{registry}/{repo}:{major}` (ex: `0`)
- **Latest**: `{registry}/{repo}:latest` (apenas branch main)
- **Commit SHA**: `{registry}/{repo}:{branch}-{sha}`
## Workflow de CI/CD
O workflow de CI/CD está configurado no arquivo `.github/workflows/ci.yml` e executa os seguintes jobs:
1. **Lint**: Verificação de código e formatação
2. **Build**: Compilação do projeto TypeScript
3. **Test**: Execução de testes unitários
4. **Test E2E**: Execução de testes end-to-end (apenas PRs e main)
5. **Docker Build**: Build e push da imagem Docker
6. **Release**: Criação de release no GitHub (apenas tags na main)
## Notas Importantes
- Tags devem seguir o formato semântico: `v{major}.{minor}.{patch}` (ex: `v1.0.0`)
- Releases são criadas apenas para tags na branch `main`
- A versão no `package.json` é usada para versionar imagens em branches
- Tags Git são usadas para versionar imagens em releases de produção

39
eslint.config.js Normal file
View File

@@ -0,0 +1,39 @@
const js = require('@eslint/js');
const parser = require('@typescript-eslint/parser');
const plugin = require('@typescript-eslint/eslint-plugin');
const prettierPlugin = require('eslint-plugin-prettier');
const prettierConfig = require('eslint-config-prettier');
module.exports = [
js.configs.recommended,
prettierConfig,
{
files: ['**/*.ts'],
languageOptions: {
parser: parser,
parserOptions: {
project: './tsconfig.json',
sourceType: 'module',
},
globals: {
node: true,
jest: true,
},
},
plugins: {
'@typescript-eslint': plugin,
prettier: prettierPlugin,
},
rules: {
...plugin.configs.recommended.rules,
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'prettier/prettier': 'error',
},
},
{
ignores: ['dist/**', 'node_modules/**', 'coverage/**'],
},
];

13
jest.setup.js Normal file
View File

@@ -0,0 +1,13 @@
/**
 * Global Jest setup, executed before every test file.
 *
 * Works around a TypeORM problem with `node:url` by mocking the `glob`
 * package TypeORM pulls in: the real module's exports are preserved, but
 * the `glob` and `globSync` entry points are replaced with inert
 * jest.fn() stubs.
 */
jest.mock('glob', () => {
  const actual = jest.requireActual('glob');
  return {
    ...actual,
    glob: jest.fn(),
    globSync: jest.fn(),
  };
});

481
package-lock.json generated
View File

@@ -64,6 +64,9 @@
"@types/multer": "^1.4.12", "@types/multer": "^1.4.12",
"@types/node": "^22.14.0", "@types/node": "^22.14.0",
"@types/supertest": "^2.0.10", "@types/supertest": "^2.0.10",
"@typescript-eslint/eslint-plugin": "^8.46.3",
"@typescript-eslint/parser": "^8.46.3",
"eslint": "^9.39.1",
"eslint-config-prettier": "^6.15.0", "eslint-config-prettier": "^6.15.0",
"eslint-plugin-prettier": "^3.1.4", "eslint-plugin-prettier": "^3.1.4",
"jest": "^30.2.0", "jest": "^30.2.0",
@@ -812,7 +815,6 @@
"integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"eslint-visitor-keys": "^3.4.3" "eslint-visitor-keys": "^3.4.3"
}, },
@@ -832,7 +834,6 @@
"integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"engines": { "engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0" "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}, },
@@ -846,7 +847,6 @@
"integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": "^12.0.0 || ^14.0.0 || >=16.0.0" "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
} }
@@ -857,7 +857,6 @@
"integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"dependencies": { "dependencies": {
"@eslint/object-schema": "^2.1.7", "@eslint/object-schema": "^2.1.7",
"debug": "^4.3.1", "debug": "^4.3.1",
@@ -873,7 +872,6 @@
"integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"dependencies": { "dependencies": {
"@eslint/core": "^0.17.0" "@eslint/core": "^0.17.0"
}, },
@@ -887,7 +885,6 @@
"integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"dependencies": { "dependencies": {
"@types/json-schema": "^7.0.15" "@types/json-schema": "^7.0.15"
}, },
@@ -901,7 +898,6 @@
"integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"ajv": "^6.12.4", "ajv": "^6.12.4",
"debug": "^4.3.2", "debug": "^4.3.2",
@@ -926,7 +922,6 @@
"integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==", "integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0" "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}, },
@@ -940,7 +935,6 @@
"integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"engines": { "engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0" "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
} }
@@ -951,7 +945,6 @@
"integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"dependencies": { "dependencies": {
"@eslint/core": "^0.17.0", "@eslint/core": "^0.17.0",
"levn": "^0.4.1" "levn": "^0.4.1"
@@ -966,7 +959,6 @@
"integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"engines": { "engines": {
"node": ">=18.18.0" "node": ">=18.18.0"
} }
@@ -977,7 +969,6 @@
"integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"dependencies": { "dependencies": {
"@humanfs/core": "^0.19.1", "@humanfs/core": "^0.19.1",
"@humanwhocodes/retry": "^0.4.0" "@humanwhocodes/retry": "^0.4.0"
@@ -992,7 +983,6 @@
"integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"engines": { "engines": {
"node": ">=12.22" "node": ">=12.22"
}, },
@@ -1007,7 +997,6 @@
"integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"engines": { "engines": {
"node": ">=18.18" "node": ">=18.18"
}, },
@@ -3217,6 +3206,44 @@
"url": "https://paulmillr.com/funding/" "url": "https://paulmillr.com/funding/"
} }
}, },
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.stat": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.walk": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.scandir": "2.1.5",
"fastq": "^1.6.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/@nuxt/opencollective": { "node_modules/@nuxt/opencollective": {
"version": "0.4.1", "version": "0.4.1",
"resolved": "https://registry.npmjs.org/@nuxt/opencollective/-/opencollective-0.4.1.tgz", "resolved": "https://registry.npmjs.org/@nuxt/opencollective/-/opencollective-0.4.1.tgz",
@@ -3694,6 +3721,264 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.3.tgz",
"integrity": "sha512-sbaQ27XBUopBkRiuY/P9sWGOWUW4rl8fDoHIUmLpZd8uldsTyB4/Zg6bWTegPoTLnKj9Hqgn3QD6cjPNB32Odw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
"@typescript-eslint/scope-manager": "8.46.3",
"@typescript-eslint/type-utils": "8.46.3",
"@typescript-eslint/utils": "8.46.3",
"@typescript-eslint/visitor-keys": "8.46.3",
"graphemer": "^1.4.0",
"ignore": "^7.0.0",
"natural-compare": "^1.4.0",
"ts-api-utils": "^2.1.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"@typescript-eslint/parser": "^8.46.3",
"eslint": "^8.57.0 || ^9.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": {
"version": "7.0.5",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz",
"integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 4"
}
},
"node_modules/@typescript-eslint/parser": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.3.tgz",
"integrity": "sha512-6m1I5RmHBGTnUGS113G04DMu3CpSdxCAU/UvtjNWL4Nuf3MW9tQhiJqRlHzChIkhy6kZSAQmc+I1bcGjE3yNKg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/scope-manager": "8.46.3",
"@typescript-eslint/types": "8.46.3",
"@typescript-eslint/typescript-estree": "8.46.3",
"@typescript-eslint/visitor-keys": "8.46.3",
"debug": "^4.3.4"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"eslint": "^8.57.0 || ^9.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/project-service": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.3.tgz",
"integrity": "sha512-Fz8yFXsp2wDFeUElO88S9n4w1I4CWDTXDqDr9gYvZgUpwXQqmZBr9+NTTql5R3J7+hrJZPdpiWaB9VNhAKYLuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/tsconfig-utils": "^8.46.3",
"@typescript-eslint/types": "^8.46.3",
"debug": "^4.3.4"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.3.tgz",
"integrity": "sha512-FCi7Y1zgrmxp3DfWfr+3m9ansUUFoy8dkEdeQSgA9gbm8DaHYvZCdkFRQrtKiedFf3Ha6VmoqoAaP68+i+22kg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.46.3",
"@typescript-eslint/visitor-keys": "8.46.3"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@typescript-eslint/tsconfig-utils": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.3.tgz",
"integrity": "sha512-GLupljMniHNIROP0zE7nCcybptolcH8QZfXOpCfhQDAdwJ/ZTlcaBOYebSOZotpti/3HrHSw7D3PZm75gYFsOA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.3.tgz",
"integrity": "sha512-ZPCADbr+qfz3aiTTYNNkCbUt+cjNwI/5McyANNrFBpVxPt7GqpEYz5ZfdwuFyGUnJ9FdDXbGODUu6iRCI6XRXw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.46.3",
"@typescript-eslint/typescript-estree": "8.46.3",
"@typescript-eslint/utils": "8.46.3",
"debug": "^4.3.4",
"ts-api-utils": "^2.1.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"eslint": "^8.57.0 || ^9.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/types": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.3.tgz",
"integrity": "sha512-G7Ok9WN/ggW7e/tOf8TQYMaxgID3Iujn231hfi0Pc7ZheztIJVpO44ekY00b7akqc6nZcvregk0Jpah3kep6hA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.3.tgz",
"integrity": "sha512-f/NvtRjOm80BtNM5OQtlaBdM5BRFUv7gf381j9wygDNL+qOYSNOgtQ/DCndiYi80iIOv76QqaTmp4fa9hwI0OA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/project-service": "8.46.3",
"@typescript-eslint/tsconfig-utils": "8.46.3",
"@typescript-eslint/types": "8.46.3",
"@typescript-eslint/visitor-keys": "8.46.3",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
"minimatch": "^9.0.4",
"semver": "^7.6.0",
"ts-api-utils": "^2.1.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=16 || 14 >=14.17"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/@typescript-eslint/utils": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.3.tgz",
"integrity": "sha512-VXw7qmdkucEx9WkmR3ld/u6VhRyKeiF1uxWwCy/iuNfokjJ7VhsgLSOTjsol8BunSw190zABzpwdNsze2Kpo4g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.7.0",
"@typescript-eslint/scope-manager": "8.46.3",
"@typescript-eslint/types": "8.46.3",
"@typescript-eslint/typescript-estree": "8.46.3"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"eslint": "^8.57.0 || ^9.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.46.3",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.3.tgz",
"integrity": "sha512-uk574k8IU0rOF/AjniX8qbLSGURJVUCeM5e4MIMKBFFi8weeiLrG1fyQejyLXQpRZbU/1BuQasleV/RfHC3hHg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.46.3",
"eslint-visitor-keys": "^4.2.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@ungap/structured-clone": { "node_modules/@ungap/structured-clone": {
"version": "1.3.0", "version": "1.3.0",
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz",
@@ -4187,7 +4472,6 @@
"integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"peerDependencies": { "peerDependencies": {
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
} }
@@ -5528,8 +5812,7 @@
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT"
"peer": true
}, },
"node_modules/deepmerge": { "node_modules/deepmerge": {
"version": "4.3.1", "version": "4.3.1",
@@ -5843,7 +6126,6 @@
"integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": ">=10" "node": ">=10"
}, },
@@ -5857,7 +6139,6 @@
"integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==", "integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/eslint-utils": "^4.8.0",
"@eslint-community/regexpp": "^4.12.1", "@eslint-community/regexpp": "^4.12.1",
@@ -5980,7 +6261,6 @@
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"engines": { "engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0" "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}, },
@@ -5994,7 +6274,6 @@
"integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==",
"dev": true, "dev": true,
"license": "BSD-2-Clause", "license": "BSD-2-Clause",
"peer": true,
"dependencies": { "dependencies": {
"esrecurse": "^4.3.0", "esrecurse": "^4.3.0",
"estraverse": "^5.2.0" "estraverse": "^5.2.0"
@@ -6012,7 +6291,6 @@
"integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==",
"dev": true, "dev": true,
"license": "BSD-2-Clause", "license": "BSD-2-Clause",
"peer": true,
"dependencies": { "dependencies": {
"acorn": "^8.15.0", "acorn": "^8.15.0",
"acorn-jsx": "^5.3.2", "acorn-jsx": "^5.3.2",
@@ -6045,7 +6323,6 @@
"integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==",
"dev": true, "dev": true,
"license": "BSD-3-Clause", "license": "BSD-3-Clause",
"peer": true,
"dependencies": { "dependencies": {
"estraverse": "^5.1.0" "estraverse": "^5.1.0"
}, },
@@ -6082,7 +6359,6 @@
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
"dev": true, "dev": true,
"license": "BSD-2-Clause", "license": "BSD-2-Clause",
"peer": true,
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=0.10.0"
} }
@@ -6219,6 +6495,36 @@
"dev": true, "dev": true,
"license": "Apache-2.0" "license": "Apache-2.0"
}, },
"node_modules/fast-glob": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz",
"integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.stat": "^2.0.2",
"@nodelib/fs.walk": "^1.2.3",
"glob-parent": "^5.1.2",
"merge2": "^1.3.0",
"micromatch": "^4.0.8"
},
"engines": {
"node": ">=8.6.0"
}
},
"node_modules/fast-glob/node_modules/glob-parent": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/fast-json-stable-stringify": { "node_modules/fast-json-stable-stringify": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
@@ -6231,8 +6537,7 @@
"resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT"
"peer": true
}, },
"node_modules/fast-safe-stringify": { "node_modules/fast-safe-stringify": {
"version": "2.1.1", "version": "2.1.1",
@@ -6257,6 +6562,16 @@
], ],
"license": "BSD-3-Clause" "license": "BSD-3-Clause"
}, },
"node_modules/fastq": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz",
"integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"reusify": "^1.0.4"
}
},
"node_modules/fb-watchman": { "node_modules/fb-watchman": {
"version": "2.0.2", "version": "2.0.2",
"resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz",
@@ -6279,7 +6594,6 @@
"integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"flat-cache": "^4.0.0" "flat-cache": "^4.0.0"
}, },
@@ -6341,7 +6655,6 @@
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"locate-path": "^6.0.0", "locate-path": "^6.0.0",
"path-exists": "^4.0.0" "path-exists": "^4.0.0"
@@ -6359,7 +6672,6 @@
"integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"flatted": "^3.2.9", "flatted": "^3.2.9",
"keyv": "^4.5.4" "keyv": "^4.5.4"
@@ -6373,8 +6685,7 @@
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz",
"integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
"dev": true, "dev": true,
"license": "ISC", "license": "ISC"
"peer": true
}, },
"node_modules/follow-redirects": { "node_modules/follow-redirects": {
"version": "1.15.9", "version": "1.15.9",
@@ -6717,7 +7028,6 @@
"integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
"dev": true, "dev": true,
"license": "ISC", "license": "ISC",
"peer": true,
"dependencies": { "dependencies": {
"is-glob": "^4.0.3" "is-glob": "^4.0.3"
}, },
@@ -6764,7 +7074,6 @@
"integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
@@ -6791,6 +7100,13 @@
"dev": true, "dev": true,
"license": "ISC" "license": "ISC"
}, },
"node_modules/graphemer": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
"dev": true,
"license": "MIT"
},
"node_modules/guid-typescript": { "node_modules/guid-typescript": {
"version": "1.0.9", "version": "1.0.9",
"resolved": "https://registry.npmjs.org/guid-typescript/-/guid-typescript-1.0.9.tgz", "resolved": "https://registry.npmjs.org/guid-typescript/-/guid-typescript-1.0.9.tgz",
@@ -6961,7 +7277,6 @@
"integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": ">= 4" "node": ">= 4"
} }
@@ -7120,7 +7435,6 @@
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=0.10.0"
} }
@@ -7168,7 +7482,6 @@
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"is-extglob": "^2.1.1" "is-extglob": "^2.1.1"
}, },
@@ -8947,8 +9260,7 @@
"resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
"integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT"
"peer": true
}, },
"node_modules/json-parse-even-better-errors": { "node_modules/json-parse-even-better-errors": {
"version": "2.3.1", "version": "2.3.1",
@@ -8969,8 +9281,7 @@
"resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT"
"peer": true
}, },
"node_modules/json5": { "node_modules/json5": {
"version": "2.2.3", "version": "2.2.3",
@@ -9054,7 +9365,6 @@
"integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"json-buffer": "3.0.1" "json-buffer": "3.0.1"
} }
@@ -9075,7 +9385,6 @@
"integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"prelude-ls": "^1.2.1", "prelude-ls": "^1.2.1",
"type-check": "~0.4.0" "type-check": "~0.4.0"
@@ -9132,7 +9441,6 @@
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"p-locate": "^5.0.0" "p-locate": "^5.0.0"
}, },
@@ -9209,8 +9517,7 @@
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT"
"peer": true
}, },
"node_modules/lodash.once": { "node_modules/lodash.once": {
"version": "4.1.1", "version": "4.1.1",
@@ -9364,6 +9671,16 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/merge2": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 8"
}
},
"node_modules/methods": { "node_modules/methods": {
"version": "1.1.2", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
@@ -9776,7 +10093,6 @@
"integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"deep-is": "^0.1.3", "deep-is": "^0.1.3",
"fast-levenshtein": "^2.0.6", "fast-levenshtein": "^2.0.6",
@@ -9858,7 +10174,6 @@
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"p-limit": "^3.0.2" "p-limit": "^3.0.2"
}, },
@@ -10343,7 +10658,6 @@
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": ">= 0.8.0" "node": ">= 0.8.0"
} }
@@ -10523,6 +10837,27 @@
"node": ">=0.4.x" "node": ">=0.4.x"
} }
}, },
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/randombytes": { "node_modules/randombytes": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
@@ -10699,6 +11034,17 @@
"dev": true, "dev": true,
"license": "ISC" "license": "ISC"
}, },
"node_modules/reusify": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
"integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==",
"dev": true,
"license": "MIT",
"engines": {
"iojs": ">=1.0.0",
"node": ">=0.10.0"
}
},
"node_modules/rimraf": { "node_modules/rimraf": {
"version": "6.0.1", "version": "6.0.1",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.0.1.tgz", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.0.1.tgz",
@@ -10735,6 +11081,30 @@
"node": ">= 18" "node": ">= 18"
} }
}, },
"node_modules/run-parallel": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT",
"dependencies": {
"queue-microtask": "^1.2.2"
}
},
"node_modules/rxjs": { "node_modules/rxjs": {
"version": "7.8.2", "version": "7.8.2",
"resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz",
@@ -11741,6 +12111,19 @@
"tree-kill": "cli.js" "tree-kill": "cli.js"
} }
}, },
"node_modules/ts-api-utils": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18.12"
},
"peerDependencies": {
"typescript": ">=4.8.4"
}
},
"node_modules/ts-jest": { "node_modules/ts-jest": {
"version": "29.4.5", "version": "29.4.5",
"resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.5.tgz", "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.5.tgz",
@@ -11974,7 +12357,6 @@
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"prelude-ls": "^1.2.1" "prelude-ls": "^1.2.1"
}, },
@@ -12792,7 +13174,6 @@
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=0.10.0"
} }

View File

@@ -79,6 +79,9 @@
"@types/multer": "^1.4.12", "@types/multer": "^1.4.12",
"@types/node": "^22.14.0", "@types/node": "^22.14.0",
"@types/supertest": "^2.0.10", "@types/supertest": "^2.0.10",
"@typescript-eslint/eslint-plugin": "^8.46.3",
"@typescript-eslint/parser": "^8.46.3",
"eslint": "^9.39.1",
"eslint-config-prettier": "^6.15.0", "eslint-config-prettier": "^6.15.0",
"eslint-plugin-prettier": "^3.1.4", "eslint-plugin-prettier": "^3.1.4",
"jest": "^30.2.0", "jest": "^30.2.0",
@@ -109,6 +112,10 @@
"testEnvironment": "node", "testEnvironment": "node",
"moduleNameMapper": { "moduleNameMapper": {
"^src/(.*)$": "<rootDir>/$1" "^src/(.*)$": "<rootDir>/$1"
} },
"transformIgnorePatterns": [
"node_modules/(?!(typeorm|@nestjs)/)"
],
"setupFilesAfterEnv": ["../jest.setup.js"]
} }
} }

View File

@@ -1,6 +0,0 @@
/**
 * Minimal logging contract used across the application so the concrete
 * logger implementation (e.g. the NestJS Logger) can be swapped freely.
 *
 * Generalized to accept optional structured metadata, matching the
 * signatures its NestLoggerAdapter implementation already exposes;
 * existing implementers and callers remain compatible (params are optional).
 */
export interface ILogger {
  /** Logs an informational message, optionally with structured metadata. */
  log(message: string, meta?: Record<string, unknown>): void;
  /** Logs a warning message, optionally with structured metadata. */
  warn(message: string, meta?: Record<string, unknown>): void;
  /** Logs an error with an optional stack trace and structured metadata. */
  error(message: string, trace?: string, meta?: Record<string, unknown>): void;
}

View File

@@ -1,26 +0,0 @@
import { Logger } from '@nestjs/common';
import { ILogger } from './ILogger';
/**
 * ILogger implementation backed by the built-in NestJS Logger, so the rest
 * of the application depends only on the ILogger abstraction rather than on
 * NestJS directly.
 */
export class NestLoggerAdapter implements ILogger {
  private readonly logger: Logger;

  /** @param context Context name shown in every log line emitted here. */
  constructor(private readonly context: string) {
    this.logger = new Logger(context);
  }

  /** Logs an informational message, appending serialized metadata when given. */
  log(message: string, meta?: Record<string, unknown>): void {
    this.logger.log(this.formatMessage(message, meta));
  }

  /** Logs a warning, appending serialized metadata when given. */
  warn(message: string, meta?: Record<string, unknown>): void {
    this.logger.warn(this.formatMessage(message, meta));
  }

  /** Logs an error with an optional stack trace, appending metadata when given. */
  error(message: string, trace?: string, meta?: Record<string, unknown>): void {
    this.logger.error(this.formatMessage(message, meta), trace);
  }

  // NOTE(review): JSON.stringify throws on circular metadata — assumed not to
  // occur for the small structured objects passed here; confirm with callers.
  private formatMessage(message: string, meta?: Record<string, unknown>): string {
    return meta ? `${message} | ${JSON.stringify(meta)}` : message;
  }
}

View File

@@ -1,22 +0,0 @@
import { ILogger } from './ILogger';
/**
 * Method decorator factory that logs the start, duration and outcome of an
 * async method call.
 *
 * The decorated instance is expected to expose a `logger` with `log`/`error`
 * methods (e.g. an ILogger); when no logger is present, logging is skipped
 * instead of crashing the decorated call.
 *
 * @param label Optional context label; defaults to `ClassName.method`.
 */
export function LogExecution(label?: string) {
  return function (
    target: any,
    propertyKey: string,
    descriptor: PropertyDescriptor,
  ) {
    const original = descriptor.value;
    descriptor.value = async function (this: any, ...args: any[]) {
      // Looked up at call time; optional chaining keeps the method usable
      // even when no logger was wired into the instance.
      const logger = this?.logger;
      const context = label || `${target.constructor.name}.${propertyKey}`;
      const start = Date.now();
      logger?.log(`Iniciando: ${context} | ${JSON.stringify({ args })}`);
      try {
        const result = await original.apply(this, args);
        logger?.log(`Finalizado: ${context} em ${Date.now() - start}ms`);
        return result;
      } catch (err) {
        // Previously a failure silently skipped the completion log; record
        // the failure with its duration, then rethrow for the caller.
        logger?.error(
          `Falhou: ${context} em ${Date.now() - start}ms`,
          err instanceof Error ? err.stack : undefined,
        );
        throw err;
      }
    };
    return descriptor;
  };
}

View File

@@ -1,14 +0,0 @@
import { Module } from '@nestjs/common';
import { NestLoggerAdapter } from './NestLoggerAdapter';
import { ILogger } from './ILogger';
// Module that wires the application's logging abstraction.
// Consumers inject the logger via the string token: @Inject('LoggerService').
@Module({
  providers: [
    {
      provide: 'LoggerService',
      // Factory builds a Nest-backed adapter scoped to the given context name,
      // which appears as the context of every log line it emits.
      useFactory: () => new NestLoggerAdapter('DataConsultService'),
    },
  ],
  // Re-export the token so importing modules can inject the same instance.
  exports: ['LoggerService'],
})
export class LoggerModule {}

View File

@@ -8,32 +8,23 @@ import { OrdersPaymentModule } from './orders-payment/orders-payment.module';
import { AuthModule } from './auth/auth/auth.module'; import { AuthModule } from './auth/auth/auth.module';
import { DataConsultModule } from './data-consult/data-consult.module'; import { DataConsultModule } from './data-consult/data-consult.module';
import { OrdersModule } from './orders/modules/orders.module'; import { OrdersModule } from './orders/modules/orders.module';
import { OcorrencesController } from './crm/occurrences/ocorrences.controller';
import { OccurrencesModule } from './crm/occurrences/occurrences.module';
import { ReasonTableModule } from './crm/reason-table/reason-table.module';
import { NegotiationsModule } from './crm/negotiations/negotiations.module';
import { HttpModule } from '@nestjs/axios'; import { HttpModule } from '@nestjs/axios';
import { DebModule } from './orders/modules/deb.module'; import { DebModule } from './orders/modules/deb.module';
import { LogisticController } from './logistic/logistic.controller'; import { LogisticController } from './logistic/logistic.controller';
import { LogisticService } from './logistic/logistic.service'; import { LogisticService } from './logistic/logistic.service';
import { LoggerModule } from './Log/logger.module';
import jwtConfig from './auth/jwt.config'; import jwtConfig from './auth/jwt.config';
import { UsersModule } from './auth/users/users.module'; import { UsersModule } from './auth/users/users.module';
import { ProductsModule } from './products/products.module'; import { ProductsModule } from './products/products.module';
import { ThrottlerModule, ThrottlerModuleOptions } from '@nestjs/throttler'; import { ThrottlerModule, ThrottlerModuleOptions } from '@nestjs/throttler';
import { RateLimiterMiddleware } from './common/middlewares/rate-limiter.middleware'; import { RateLimiterMiddleware } from './common/middlewares/rate-limiter.middleware';
import { RequestSanitizerMiddleware } from './common/middlewares/request-sanitizer.middleware'; import { RequestSanitizerMiddleware } from './common/middlewares/request-sanitizer.middleware';
import { HealthModule } from './health/health.module';
import { clientes } from './data-consult/clientes.module'; import { clientes } from './data-consult/clientes.module';
import { PartnersModule } from './partners/partners.module'; import { PartnersModule } from './partners/partners.module';
@Module({ @Module({
imports: [ imports: [
UsersModule, UsersModule,
ConfigModule.forRoot({ isGlobal: true, ConfigModule.forRoot({ isGlobal: true, load: [jwtConfig] }),
load: [jwtConfig]
}),
TypeOrmModule.forRootAsync({ TypeOrmModule.forRootAsync({
name: 'oracle', name: 'oracle',
inject: [ConfigService], inject: [ConfigService],
@@ -62,28 +53,19 @@ import { PartnersModule } from './partners/partners.module';
OrdersModule, OrdersModule,
clientes, clientes,
ProductsModule, ProductsModule,
NegotiationsModule,
OccurrencesModule,
ReasonTableModule,
LoggerModule,
DataConsultModule, DataConsultModule,
AuthModule, AuthModule,
DebModule, DebModule,
OrdersModule, OrdersModule,
HealthModule,
PartnersModule, PartnersModule,
], ],
controllers: [OcorrencesController, LogisticController ], controllers: [LogisticController],
providers: [ LogisticService,], providers: [LogisticService],
}) })
export class AppModule implements NestModule { export class AppModule implements NestModule {
configure(consumer: MiddlewareConsumer) { configure(consumer: MiddlewareConsumer) {
consumer consumer.apply(RequestSanitizerMiddleware).forRoutes('*');
.apply(RequestSanitizerMiddleware)
.forRoutes('*');
consumer consumer.apply(RateLimiterMiddleware).forRoutes('auth', 'users');
.apply(RateLimiterMiddleware)
.forRoutes('auth', 'users');
} }
} }

View File

@@ -46,7 +46,9 @@ export interface AuthServiceTestContext {
mockUserRepository: ReturnType<typeof createMockUserRepository>; mockUserRepository: ReturnType<typeof createMockUserRepository>;
mockTokenBlacklistService: ReturnType<typeof createMockTokenBlacklistService>; mockTokenBlacklistService: ReturnType<typeof createMockTokenBlacklistService>;
mockRefreshTokenService: ReturnType<typeof createMockRefreshTokenService>; mockRefreshTokenService: ReturnType<typeof createMockRefreshTokenService>;
mockSessionManagementService: ReturnType<typeof createMockSessionManagementService>; mockSessionManagementService: ReturnType<
typeof createMockSessionManagementService
>;
} }
export async function createAuthServiceTestModule(): Promise<AuthServiceTestContext> { export async function createAuthServiceTestModule(): Promise<AuthServiceTestContext> {
@@ -101,4 +103,3 @@ export async function createAuthServiceTestModule(): Promise<AuthServiceTestCont
mockSessionManagementService, mockSessionManagementService,
}; };
} }

View File

@@ -29,7 +29,7 @@ describe('AuthService - createToken', () => {
username, username,
email, email,
storeId, storeId,
sessionId sessionId,
); );
expect(context.mockJwtService.sign).toHaveBeenCalledWith( expect(context.mockJwtService.sign).toHaveBeenCalledWith(
@@ -41,7 +41,7 @@ describe('AuthService - createToken', () => {
email: email, email: email,
sessionId: sessionId, sessionId: sessionId,
}, },
{ expiresIn: '8h' } { expiresIn: '8h' },
); );
expect(result).toBe(mockToken); expect(result).toBe(mockToken);
}); });
@@ -61,7 +61,7 @@ describe('AuthService - createToken', () => {
sellerId, sellerId,
username, username,
email, email,
storeId storeId,
); );
expect(context.mockJwtService.sign).toHaveBeenCalledWith( expect(context.mockJwtService.sign).toHaveBeenCalledWith(
@@ -73,7 +73,7 @@ describe('AuthService - createToken', () => {
email: email, email: email,
sessionId: undefined, sessionId: undefined,
}, },
{ expiresIn: '8h' } { expiresIn: '8h' },
); );
expect(result).toBe(mockToken); expect(result).toBe(mockToken);
}); });
@@ -93,12 +93,12 @@ describe('AuthService - createToken', () => {
sellerId, sellerId,
username, username,
email, email,
storeId storeId,
); );
expect(context.mockJwtService.sign).toHaveBeenCalledWith( expect(context.mockJwtService.sign).toHaveBeenCalledWith(
expect.any(Object), expect.any(Object),
{ expiresIn: '8h' } { expiresIn: '8h' },
); );
}); });
@@ -119,7 +119,7 @@ describe('AuthService - createToken', () => {
username, username,
email, email,
storeId, storeId,
sessionId sessionId,
); );
const signCall = context.mockJwtService.sign.mock.calls[0]; const signCall = context.mockJwtService.sign.mock.calls[0];
@@ -150,7 +150,7 @@ describe('AuthService - createToken', () => {
username, username,
email, email,
storeId, storeId,
sessionId sessionId,
); );
expect(context.mockJwtService.sign).toHaveBeenCalledWith( expect(context.mockJwtService.sign).toHaveBeenCalledWith(
@@ -162,7 +162,7 @@ describe('AuthService - createToken', () => {
email: email, email: email,
sessionId: sessionId, sessionId: sessionId,
}, },
{ expiresIn: '8h' } { expiresIn: '8h' },
); );
expect(result).toBe(mockToken); expect(result).toBe(mockToken);
}); });
@@ -171,7 +171,13 @@ describe('AuthService - createToken', () => {
const mockToken = 'mock.jwt.token.once'; const mockToken = 'mock.jwt.token.once';
context.mockJwtService.sign.mockReturnValue(mockToken); context.mockJwtService.sign.mockReturnValue(mockToken);
await context.service.createToken(1, 100, 'test', 'test@test.com', 'STORE001'); await context.service.createToken(
1,
100,
'test',
'test@test.com',
'STORE001',
);
expect(context.mockJwtService.sign).toHaveBeenCalledTimes(1); expect(context.mockJwtService.sign).toHaveBeenCalledTimes(1);
}); });
@@ -183,7 +189,7 @@ describe('AuthService - createToken', () => {
* de validação no método createToken. Atualmente, o método não valida * de validação no método createToken. Atualmente, o método não valida
* os parâmetros de entrada, o que pode causar problemas de segurança * os parâmetros de entrada, o que pode causar problemas de segurança
* e tokens inválidos. * e tokens inválidos.
* *
* PROBLEMAS IDENTIFICADOS: * PROBLEMAS IDENTIFICADOS:
* 1. Não valida se IDs são positivos * 1. Não valida se IDs são positivos
* 2. Não valida se strings estão vazias * 2. Não valida se strings estão vazias
@@ -199,7 +205,13 @@ describe('AuthService - createToken', () => {
const negativeId = -1; const negativeId = -1;
await expect( await expect(
context.service.createToken(negativeId, 100, 'test', 'test@test.com', 'STORE001') context.service.createToken(
negativeId,
100,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('ID de usuário inválido'); ).rejects.toThrow('ID de usuário inválido');
}); });
@@ -207,7 +219,13 @@ describe('AuthService - createToken', () => {
const zeroId = 0; const zeroId = 0;
await expect( await expect(
context.service.createToken(zeroId, 100, 'test', 'test@test.com', 'STORE001') context.service.createToken(
zeroId,
100,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('ID de usuário inválido'); ).rejects.toThrow('ID de usuário inválido');
}); });
@@ -215,15 +233,93 @@ describe('AuthService - createToken', () => {
const negativeSellerId = -1; const negativeSellerId = -1;
await expect( await expect(
context.service.createToken(1, negativeSellerId, 'test', 'test@test.com', 'STORE001') context.service.createToken(
1,
negativeSellerId,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('ID de vendedor inválido'); ).rejects.toThrow('ID de vendedor inválido');
}); });
it('should accept null seller ID', async () => {
const mockToken = 'mock.jwt.token.null.seller';
const userId = 1427;
const sellerId = null;
const username = 'brunelle.c';
const email = 'brunelle.c@jurunense.com.br';
const storeId = '12';
const sessionId = 'session-null-seller';
context.mockJwtService.sign.mockReturnValue(mockToken);
const result = await context.service.createToken(
userId,
sellerId,
username,
email,
storeId,
sessionId,
);
expect(context.mockJwtService.sign).toHaveBeenCalledWith(
{
id: userId,
sellerId: null,
storeId: storeId,
username: username,
email: email,
sessionId: sessionId,
},
{ expiresIn: '8h' },
);
expect(result).toBe(mockToken);
});
it('should accept undefined seller ID', async () => {
const mockToken = 'mock.jwt.token.undefined.seller';
const userId = 1427;
const sellerId = undefined;
const username = 'brunelle.c';
const email = 'brunelle.c@jurunense.com.br';
const storeId = '12';
context.mockJwtService.sign.mockReturnValue(mockToken);
const result = await context.service.createToken(
userId,
sellerId as any,
username,
email,
storeId,
);
expect(context.mockJwtService.sign).toHaveBeenCalledWith(
{
id: userId,
sellerId: undefined,
storeId: storeId,
username: username,
email: email,
sessionId: undefined,
},
{ expiresIn: '8h' },
);
expect(result).toBe(mockToken);
});
it('should reject empty username', async () => { it('should reject empty username', async () => {
const emptyUsername = ''; const emptyUsername = '';
await expect( await expect(
context.service.createToken(1, 100, emptyUsername, 'test@test.com', 'STORE001') context.service.createToken(
1,
100,
emptyUsername,
'test@test.com',
'STORE001',
),
).rejects.toThrow('Nome de usuário não pode estar vazio'); ).rejects.toThrow('Nome de usuário não pode estar vazio');
}); });
@@ -231,7 +327,13 @@ describe('AuthService - createToken', () => {
const whitespaceUsername = ' '; const whitespaceUsername = ' ';
await expect( await expect(
context.service.createToken(1, 100, whitespaceUsername, 'test@test.com', 'STORE001') context.service.createToken(
1,
100,
whitespaceUsername,
'test@test.com',
'STORE001',
),
).rejects.toThrow('Nome de usuário não pode estar vazio'); ).rejects.toThrow('Nome de usuário não pode estar vazio');
}); });
@@ -239,7 +341,7 @@ describe('AuthService - createToken', () => {
const emptyEmail = ''; const emptyEmail = '';
await expect( await expect(
context.service.createToken(1, 100, 'test', emptyEmail, 'STORE001') context.service.createToken(1, 100, 'test', emptyEmail, 'STORE001'),
).rejects.toThrow('Email não pode estar vazio'); ).rejects.toThrow('Email não pode estar vazio');
}); });
@@ -247,7 +349,7 @@ describe('AuthService - createToken', () => {
const invalidEmail = 'not-an-email'; const invalidEmail = 'not-an-email';
await expect( await expect(
context.service.createToken(1, 100, 'test', invalidEmail, 'STORE001') context.service.createToken(1, 100, 'test', invalidEmail, 'STORE001'),
).rejects.toThrow('Formato de email inválido'); ).rejects.toThrow('Formato de email inválido');
}); });
@@ -255,7 +357,7 @@ describe('AuthService - createToken', () => {
const invalidEmail = 'testemail.com'; const invalidEmail = 'testemail.com';
await expect( await expect(
context.service.createToken(1, 100, 'test', invalidEmail, 'STORE001') context.service.createToken(1, 100, 'test', invalidEmail, 'STORE001'),
).rejects.toThrow('Formato de email inválido'); ).rejects.toThrow('Formato de email inválido');
}); });
@@ -263,19 +365,37 @@ describe('AuthService - createToken', () => {
const emptyStoreId = ''; const emptyStoreId = '';
await expect( await expect(
context.service.createToken(1, 100, 'test', 'test@test.com', emptyStoreId) context.service.createToken(
1,
100,
'test',
'test@test.com',
emptyStoreId,
),
).rejects.toThrow('ID da loja não pode estar vazio'); ).rejects.toThrow('ID da loja não pode estar vazio');
}); });
it('should reject null username', async () => { it('should reject null username', async () => {
await expect( await expect(
context.service.createToken(1, 100, null as any, 'test@test.com', 'STORE001') context.service.createToken(
1,
100,
null as any,
'test@test.com',
'STORE001',
),
).rejects.toThrow('Nome de usuário não pode estar vazio'); ).rejects.toThrow('Nome de usuário não pode estar vazio');
}); });
it('should reject undefined email', async () => { it('should reject undefined email', async () => {
await expect( await expect(
context.service.createToken(1, 100, 'test', undefined as any, 'STORE001') context.service.createToken(
1,
100,
'test',
undefined as any,
'STORE001',
),
).rejects.toThrow('Email não pode estar vazio'); ).rejects.toThrow('Email não pode estar vazio');
}); });
@@ -283,7 +403,13 @@ describe('AuthService - createToken', () => {
const specialCharsOnly = '@#$%'; const specialCharsOnly = '@#$%';
await expect( await expect(
context.service.createToken(1, 100, specialCharsOnly, 'test@test.com', 'STORE001') context.service.createToken(
1,
100,
specialCharsOnly,
'test@test.com',
'STORE001',
),
).rejects.toThrow('Nome de usuário inválido'); ).rejects.toThrow('Nome de usuário inválido');
}); });
@@ -291,7 +417,13 @@ describe('AuthService - createToken', () => {
const longUsername = 'a'.repeat(10000); const longUsername = 'a'.repeat(10000);
await expect( await expect(
context.service.createToken(1, 100, longUsername, 'test@test.com', 'STORE001') context.service.createToken(
1,
100,
longUsername,
'test@test.com',
'STORE001',
),
).rejects.toThrow('Nome de usuário muito longo'); ).rejects.toThrow('Nome de usuário muito longo');
}); });
@@ -299,7 +431,7 @@ describe('AuthService - createToken', () => {
const longEmail = 'a'.repeat(10000) + '@test.com'; const longEmail = 'a'.repeat(10000) + '@test.com';
await expect( await expect(
context.service.createToken(1, 100, 'test', longEmail, 'STORE001') context.service.createToken(1, 100, 'test', longEmail, 'STORE001'),
).rejects.toThrow('Email muito longo'); ).rejects.toThrow('Email muito longo');
}); });
@@ -307,7 +439,13 @@ describe('AuthService - createToken', () => {
const sqlInjection = "admin'; DROP TABLE users; --"; const sqlInjection = "admin'; DROP TABLE users; --";
await expect( await expect(
context.service.createToken(1, 100, sqlInjection, 'test@test.com', 'STORE001') context.service.createToken(
1,
100,
sqlInjection,
'test@test.com',
'STORE001',
),
).rejects.toThrow('Nome de usuário contém caracteres inválidos'); ).rejects.toThrow('Nome de usuário contém caracteres inválidos');
}); });
@@ -315,9 +453,8 @@ describe('AuthService - createToken', () => {
const invalidEmail = 'test@@example.com'; const invalidEmail = 'test@@example.com';
await expect( await expect(
context.service.createToken(1, 100, 'test', invalidEmail, 'STORE001') context.service.createToken(1, 100, 'test', invalidEmail, 'STORE001'),
).rejects.toThrow('Formato de email inválido'); ).rejects.toThrow('Formato de email inválido');
}); });
}); });
}); });

View File

@@ -14,7 +14,7 @@ describe('AuthService - createTokenPair', () => {
describe('createTokenPair - Tests that expose problems', () => { describe('createTokenPair - Tests that expose problems', () => {
/** /**
* NOTA: Estes testes identificam problemas no método createTokenPair. * NOTA: Estes testes identificam problemas no método createTokenPair.
* *
* PROBLEMAS IDENTIFICADOS: * PROBLEMAS IDENTIFICADOS:
* 1. Não há rollback se um token é criado mas o outro falha * 1. Não há rollback se um token é criado mas o outro falha
* 2. Não valida se os tokens foram realmente gerados * 2. Não valida se os tokens foram realmente gerados
@@ -25,7 +25,9 @@ describe('AuthService - createTokenPair', () => {
beforeEach(() => { beforeEach(() => {
context.mockJwtService.sign.mockReturnValue('mock.access.token'); context.mockJwtService.sign.mockReturnValue('mock.access.token');
context.mockRefreshTokenService.generateRefreshToken.mockResolvedValue('mock.refresh.token'); context.mockRefreshTokenService.generateRefreshToken.mockResolvedValue(
'mock.refresh.token',
);
}); });
it('should handle error when createToken fails after refresh token is generated', async () => { it('should handle error when createToken fails after refresh token is generated', async () => {
@@ -39,10 +41,19 @@ describe('AuthService - createTokenPair', () => {
}); });
await expect( await expect(
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001', 'session-123') context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
'session-123',
),
).rejects.toThrow(); ).rejects.toThrow();
expect(context.mockRefreshTokenService.generateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.generateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should rollback access token if refresh token generation fails', async () => { it('should rollback access token if refresh token generation fails', async () => {
@@ -52,11 +63,18 @@ describe('AuthService - createTokenPair', () => {
* Solução esperada: Invalidar o access token ou garantir atomicidade. * Solução esperada: Invalidar o access token ou garantir atomicidade.
*/ */
context.mockRefreshTokenService.generateRefreshToken.mockRejectedValueOnce( context.mockRefreshTokenService.generateRefreshToken.mockRejectedValueOnce(
new Error('Falha ao gerar refresh token') new Error('Falha ao gerar refresh token'),
); );
await expect( await expect(
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001', 'session-123') context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
'session-123',
),
).rejects.toThrow('Falha ao gerar refresh token'); ).rejects.toThrow('Falha ao gerar refresh token');
}); });
@@ -69,7 +87,13 @@ describe('AuthService - createTokenPair', () => {
context.mockJwtService.sign.mockReturnValue(''); context.mockJwtService.sign.mockReturnValue('');
await expect( await expect(
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001') context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('Token de acesso inválido gerado'); ).rejects.toThrow('Token de acesso inválido gerado');
}); });
@@ -79,18 +103,34 @@ describe('AuthService - createTokenPair', () => {
* Problema: Método não valida o retorno. * Problema: Método não valida o retorno.
* Solução esperada: Lançar exceção se token for inválido. * Solução esperada: Lançar exceção se token for inválido.
*/ */
context.mockRefreshTokenService.generateRefreshToken.mockResolvedValue(''); context.mockRefreshTokenService.generateRefreshToken.mockResolvedValue(
'',
);
await expect( await expect(
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001') context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('Refresh token inválido gerado'); ).rejects.toThrow('Refresh token inválido gerado');
}); });
it('should validate that refresh token is not null', async () => { it('should validate that refresh token is not null', async () => {
context.mockRefreshTokenService.generateRefreshToken.mockResolvedValue(null); context.mockRefreshTokenService.generateRefreshToken.mockResolvedValue(
null,
);
await expect( await expect(
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001') context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('Refresh token inválido gerado'); ).rejects.toThrow('Refresh token inválido gerado');
}); });
@@ -101,18 +141,26 @@ describe('AuthService - createTokenPair', () => {
* Solução esperada: Access token sempre primeiro. * Solução esperada: Access token sempre primeiro.
*/ */
const callOrder = []; const callOrder = [];
context.mockJwtService.sign.mockImplementation(() => { context.mockJwtService.sign.mockImplementation(() => {
callOrder.push('accessToken'); callOrder.push('accessToken');
return 'mock.access.token'; return 'mock.access.token';
}); });
context.mockRefreshTokenService.generateRefreshToken.mockImplementation(async () => {
callOrder.push('refreshToken');
return 'mock.refresh.token';
});
await context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001'); context.mockRefreshTokenService.generateRefreshToken.mockImplementation(
async () => {
callOrder.push('refreshToken');
return 'mock.refresh.token';
},
);
await context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
);
expect(callOrder).toEqual(['accessToken', 'refreshToken']); expect(callOrder).toEqual(['accessToken', 'refreshToken']);
}); });
@@ -123,7 +171,13 @@ describe('AuthService - createTokenPair', () => {
* Problema: Cliente pode não saber quando renovar o token. * Problema: Cliente pode não saber quando renovar o token.
* Solução esperada: Sempre retornar um número positivo válido. * Solução esperada: Sempre retornar um número positivo válido.
*/ */
const result = await context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001'); const result = await context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
);
expect(result.expiresIn).toBeGreaterThan(0); expect(result.expiresIn).toBeGreaterThan(0);
expect(typeof result.expiresIn).toBe('number'); expect(typeof result.expiresIn).toBe('number');
@@ -135,7 +189,7 @@ describe('AuthService - createTokenPair', () => {
* Cenário: Múltiplas chamadas simultâneas para o mesmo usuário. * Cenário: Múltiplas chamadas simultâneas para o mesmo usuário.
* Problema: Pode criar múltiplos pares de tokens inconsistentes. * Problema: Pode criar múltiplos pares de tokens inconsistentes.
* Solução esperada: JWT service gera tokens únicos com timestamps diferentes. * Solução esperada: JWT service gera tokens únicos com timestamps diferentes.
* *
* Nota: Na implementação real, o JWT service inclui timestamp e outros dados * Nota: Na implementação real, o JWT service inclui timestamp e outros dados
* que garantem unicidade. Aqui simulamos isso no mock. * que garantem unicidade. Aqui simulamos isso no mock.
*/ */
@@ -145,19 +199,42 @@ describe('AuthService - createTokenPair', () => {
return `mock.access.token.${callCount}`; return `mock.access.token.${callCount}`;
}); });
context.mockRefreshTokenService.generateRefreshToken.mockImplementation(async () => { context.mockRefreshTokenService.generateRefreshToken.mockImplementation(
return `mock.refresh.token.${Math.random()}`; async () => {
}); return `mock.refresh.token.${Math.random()}`;
},
);
const promises = [ const promises = [
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001', 'session-1'), context.service.createTokenPair(
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001', 'session-2'), 1,
context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001', 'session-3'), 100,
'test',
'test@test.com',
'STORE001',
'session-1',
),
context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
'session-2',
),
context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
'session-3',
),
]; ];
const results = await Promise.all(promises); const results = await Promise.all(promises);
const uniqueTokens = new Set(results.map(r => r.accessToken)); const uniqueTokens = new Set(results.map((r) => r.accessToken));
expect(uniqueTokens.size).toBe(3); expect(uniqueTokens.size).toBe(3);
}); });
@@ -168,10 +245,18 @@ describe('AuthService - createTokenPair', () => {
* Solução esperada: Falhar rápido com mensagem clara. * Solução esperada: Falhar rápido com mensagem clara.
*/ */
await expect( await expect(
context.service.createTokenPair(-1, 100, 'test', 'test@test.com', 'STORE001') context.service.createTokenPair(
-1,
100,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('ID de usuário inválido'); ).rejects.toThrow('ID de usuário inválido');
expect(context.mockRefreshTokenService.generateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.generateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should not create refresh token if validation fails', async () => { it('should not create refresh token if validation fails', async () => {
@@ -181,11 +266,19 @@ describe('AuthService - createTokenPair', () => {
* Solução esperada: Validar tudo antes de criar qualquer token. * Solução esperada: Validar tudo antes de criar qualquer token.
*/ */
await expect( await expect(
context.service.createTokenPair(1, -1, 'test', 'test@test.com', 'STORE001') context.service.createTokenPair(
1,
-1,
'test',
'test@test.com',
'STORE001',
),
).rejects.toThrow('ID de vendedor inválido'); ).rejects.toThrow('ID de vendedor inválido');
expect(context.mockJwtService.sign).not.toHaveBeenCalled(); expect(context.mockJwtService.sign).not.toHaveBeenCalled();
expect(context.mockRefreshTokenService.generateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.generateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should handle undefined sessionId gracefully', async () => { it('should handle undefined sessionId gracefully', async () => {
@@ -194,11 +287,19 @@ describe('AuthService - createTokenPair', () => {
* Problema: Pode causar problemas ao gerar tokens sem session. * Problema: Pode causar problemas ao gerar tokens sem session.
* Solução esperada: Aceitar undefined e passar corretamente aos serviços. * Solução esperada: Aceitar undefined e passar corretamente aos serviços.
*/ */
const result = await context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001'); const result = await context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
);
expect(result.accessToken).toBeDefined(); expect(result.accessToken).toBeDefined();
expect(result.refreshToken).toBeDefined(); expect(result.refreshToken).toBeDefined();
expect(context.mockRefreshTokenService.generateRefreshToken).toHaveBeenCalledWith(1, undefined); expect(
context.mockRefreshTokenService.generateRefreshToken,
).toHaveBeenCalledWith(1, undefined);
}); });
it('should include all required fields in return object', async () => { it('should include all required fields in return object', async () => {
@@ -207,13 +308,55 @@ describe('AuthService - createTokenPair', () => {
* Problema: Pode faltar campos ou ter campos extras. * Problema: Pode faltar campos ou ter campos extras.
* Solução esperada: Sempre retornar accessToken, refreshToken e expiresIn. * Solução esperada: Sempre retornar accessToken, refreshToken e expiresIn.
*/ */
const result = await context.service.createTokenPair(1, 100, 'test', 'test@test.com', 'STORE001'); const result = await context.service.createTokenPair(
1,
100,
'test',
'test@test.com',
'STORE001',
);
expect(result).toHaveProperty('accessToken'); expect(result).toHaveProperty('accessToken');
expect(result).toHaveProperty('refreshToken'); expect(result).toHaveProperty('refreshToken');
expect(result).toHaveProperty('expiresIn'); expect(result).toHaveProperty('expiresIn');
expect(Object.keys(result).length).toBe(3); expect(Object.keys(result).length).toBe(3);
}); });
it('should create token pair with null seller ID', async () => {
/**
* Cenário: Usuário sem sellerId (CODUSUR NULL no banco).
* Problema: Validação anterior rejeitava null sellerId.
* Solução esperada: Aceitar null sellerId e criar tokens normalmente.
*/
const result = await context.service.createTokenPair(
1427,
null,
'BRUNELLE BENILDA GAMA COSTA',
'BRUNELLE.C@JURUNENSE.COM.BR',
'12',
'session-null-seller',
);
expect(result).toHaveProperty('accessToken');
expect(result).toHaveProperty('refreshToken');
expect(result).toHaveProperty('expiresIn');
expect(result.expiresIn).toBe(28800); // 8 horas em segundos
expect(context.mockJwtService.sign).toHaveBeenCalledWith(
{
id: 1427,
sellerId: null,
storeId: '12',
username: 'BRUNELLE BENILDA GAMA COSTA',
email: 'BRUNELLE.C@JURUNENSE.COM.BR',
sessionId: 'session-null-seller',
},
{ expiresIn: '8h' },
);
expect(
context.mockRefreshTokenService.generateRefreshToken,
).toHaveBeenCalledWith(1427, 'session-null-seller');
});
}); });
}); });

View File

@@ -13,8 +13,12 @@ describe('AuthService - logout', () => {
storeId: 'STORE001', storeId: 'STORE001',
sessionId: 'session-123', sessionId: 'session-123',
}); });
context.mockTokenBlacklistService.addToBlacklist.mockResolvedValue(undefined); context.mockTokenBlacklistService.addToBlacklist.mockResolvedValue(
context.mockSessionManagementService.terminateSession.mockResolvedValue(undefined); undefined,
);
context.mockSessionManagementService.terminateSession.mockResolvedValue(
undefined,
);
}); });
afterEach(() => { afterEach(() => {
@@ -24,7 +28,7 @@ describe('AuthService - logout', () => {
describe('logout - Tests that expose problems', () => { describe('logout - Tests that expose problems', () => {
/** /**
* NOTA: Estes testes identificam problemas no método logout. * NOTA: Estes testes identificam problemas no método logout.
* *
* PROBLEMAS IDENTIFICADOS: * PROBLEMAS IDENTIFICADOS:
* 1. Não valida token de entrada (vazio, null, undefined) * 1. Não valida token de entrada (vazio, null, undefined)
* 2. Não valida se token foi decodificado corretamente * 2. Não valida se token foi decodificado corretamente
@@ -37,66 +41,76 @@ describe('AuthService - logout', () => {
*/ */
it('should reject empty token', async () => { it('should reject empty token', async () => {
await expect( await expect(context.service.logout('')).rejects.toThrow(
context.service.logout('') 'Token não pode estar vazio',
).rejects.toThrow('Token não pode estar vazio'); );
expect(context.mockJwtService.decode).not.toHaveBeenCalled(); expect(context.mockJwtService.decode).not.toHaveBeenCalled();
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should reject null token', async () => { it('should reject null token', async () => {
await expect( await expect(context.service.logout(null as any)).rejects.toThrow(
context.service.logout(null as any) 'Token não pode estar vazio',
).rejects.toThrow('Token não pode estar vazio'); );
expect(context.mockJwtService.decode).not.toHaveBeenCalled(); expect(context.mockJwtService.decode).not.toHaveBeenCalled();
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should reject undefined token', async () => { it('should reject undefined token', async () => {
await expect( await expect(context.service.logout(undefined as any)).rejects.toThrow(
context.service.logout(undefined as any) 'Token não pode estar vazio',
).rejects.toThrow('Token não pode estar vazio'); );
expect(context.mockJwtService.decode).not.toHaveBeenCalled(); expect(context.mockJwtService.decode).not.toHaveBeenCalled();
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should reject whitespace-only token', async () => { it('should reject whitespace-only token', async () => {
await expect( await expect(context.service.logout(' ')).rejects.toThrow(
context.service.logout(' ') 'Token não pode estar vazio',
).rejects.toThrow('Token não pode estar vazio'); );
expect(context.mockJwtService.decode).not.toHaveBeenCalled(); expect(context.mockJwtService.decode).not.toHaveBeenCalled();
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should reject extremely long tokens (DoS prevention)', async () => { it('should reject extremely long tokens (DoS prevention)', async () => {
const hugeToken = 'a'.repeat(100000); const hugeToken = 'a'.repeat(100000);
await expect( await expect(context.service.logout(hugeToken)).rejects.toThrow(
context.service.logout(hugeToken) 'Token muito longo',
).rejects.toThrow('Token muito longo'); );
expect(context.mockJwtService.decode).not.toHaveBeenCalled(); expect(context.mockJwtService.decode).not.toHaveBeenCalled();
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should validate decoded token is not null', async () => { it('should validate decoded token is not null', async () => {
context.mockJwtService.decode.mockReturnValue(null); context.mockJwtService.decode.mockReturnValue(null);
await expect( await expect(context.service.logout('invalid.token')).rejects.toThrow(
context.service.logout('invalid.token') 'Token inválido ou não pode ser decodificado',
).rejects.toThrow('Token inválido ou não pode ser decodificado'); );
}); });
it('should validate decoded token has required fields', async () => { it('should validate decoded token has required fields', async () => {
context.mockJwtService.decode.mockReturnValue({} as any); context.mockJwtService.decode.mockReturnValue({} as any);
await expect( await expect(context.service.logout('incomplete.token')).rejects.toThrow(
context.service.logout('incomplete.token') 'Token inválido ou não pode ser decodificado',
).rejects.toThrow('Token inválido ou não pode ser decodificado'); );
}); });
it('should not add token to blacklist if already blacklisted', async () => { it('should not add token to blacklist if already blacklisted', async () => {
@@ -104,7 +118,9 @@ describe('AuthService - logout', () => {
await context.service.logout('already.blacklisted.token'); await context.service.logout('already.blacklisted.token');
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should validate session exists before terminating', async () => { it('should validate session exists before terminating', async () => {
@@ -114,11 +130,11 @@ describe('AuthService - logout', () => {
} as any); } as any);
context.mockSessionManagementService.terminateSession.mockRejectedValue( context.mockSessionManagementService.terminateSession.mockRejectedValue(
new Error('Sessão não encontrada') new Error('Sessão não encontrada'),
); );
await expect( await expect(
context.service.logout('token.with.invalid.session') context.service.logout('token.with.invalid.session'),
).rejects.toThrow('Sessão não encontrada'); ).rejects.toThrow('Sessão não encontrada');
}); });
@@ -128,16 +144,16 @@ describe('AuthService - logout', () => {
}); });
await expect( await expect(
context.service.logout('invalid.token.format') context.service.logout('invalid.token.format'),
).rejects.toThrow('Token inválido ou não pode ser decodificado'); ).rejects.toThrow('Token inválido ou não pode ser decodificado');
}); });
it('should sanitize token input', async () => { it('should sanitize token input', async () => {
const maliciousToken = "'; DROP TABLE users; --"; const maliciousToken = "'; DROP TABLE users; --";
await expect( await expect(context.service.logout(maliciousToken)).rejects.toThrow(
context.service.logout(maliciousToken) 'Formato de token inválido',
).rejects.toThrow('Formato de token inválido'); );
expect(context.mockJwtService.decode).not.toHaveBeenCalled(); expect(context.mockJwtService.decode).not.toHaveBeenCalled();
}); });
@@ -149,7 +165,7 @@ describe('AuthService - logout', () => {
} as any); } as any);
await expect( await expect(
context.service.logout('token.with.invalid.id') context.service.logout('token.with.invalid.id'),
).rejects.toThrow('ID de usuário inválido no token'); ).rejects.toThrow('ID de usuário inválido no token');
}); });
@@ -161,7 +177,9 @@ describe('AuthService - logout', () => {
await context.service.logout('token.with.empty.sessionid'); await context.service.logout('token.with.empty.sessionid');
expect(context.mockSessionManagementService.terminateSession).not.toHaveBeenCalled(); expect(
context.mockSessionManagementService.terminateSession,
).not.toHaveBeenCalled();
}); });
it('should complete logout even if session termination fails', async () => { it('should complete logout even if session termination fails', async () => {
@@ -172,23 +190,27 @@ describe('AuthService - logout', () => {
context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(false); context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(false);
context.mockSessionManagementService.terminateSession.mockRejectedValue( context.mockSessionManagementService.terminateSession.mockRejectedValue(
new Error('Falha ao terminar sessão') new Error('Falha ao terminar sessão'),
); );
await context.service.logout('valid.token'); await context.service.logout('valid.token');
expect(context.mockTokenBlacklistService.addToBlacklist).toHaveBeenCalledWith('valid.token'); expect(
context.mockTokenBlacklistService.addToBlacklist,
).toHaveBeenCalledWith('valid.token');
}); });
it('should not throw if token is already blacklisted', async () => { it('should not throw if token is already blacklisted', async () => {
context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(true); context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(true);
context.mockTokenBlacklistService.addToBlacklist.mockRejectedValue( context.mockTokenBlacklistService.addToBlacklist.mockRejectedValue(
new Error('Token já está na blacklist') new Error('Token já está na blacklist'),
); );
await context.service.logout('already.blacklisted.token'); await context.service.logout('already.blacklisted.token');
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should validate token format before decoding', async () => { it('should validate token format before decoding', async () => {
@@ -214,7 +236,9 @@ describe('AuthService - logout', () => {
await Promise.all(promises); await Promise.all(promises);
expect(context.mockTokenBlacklistService.addToBlacklist).toHaveBeenCalledTimes(3); expect(
context.mockTokenBlacklistService.addToBlacklist,
).toHaveBeenCalledTimes(3);
}); });
it('should validate decoded payload structure', async () => { it('should validate decoded payload structure', async () => {
@@ -223,11 +247,15 @@ describe('AuthService - logout', () => {
} as any); } as any);
await expect( await expect(
context.service.logout('token.with.invalid.structure') context.service.logout('token.with.invalid.structure'),
).rejects.toThrow('Token inválido ou não pode ser decodificado'); ).rejects.toThrow('Token inválido ou não pode ser decodificado');
expect(context.mockSessionManagementService.terminateSession).not.toHaveBeenCalled(); expect(
expect(context.mockTokenBlacklistService.addToBlacklist).not.toHaveBeenCalled(); context.mockSessionManagementService.terminateSession,
).not.toHaveBeenCalled();
expect(
context.mockTokenBlacklistService.addToBlacklist,
).not.toHaveBeenCalled();
}); });
it('should ensure token is always blacklisted on success', async () => { it('should ensure token is always blacklisted on success', async () => {
@@ -235,8 +263,12 @@ describe('AuthService - logout', () => {
await context.service.logout('valid.token'); await context.service.logout('valid.token');
expect(context.mockTokenBlacklistService.addToBlacklist).toHaveBeenCalledWith('valid.token'); expect(
expect(context.mockTokenBlacklistService.addToBlacklist).toHaveBeenCalledTimes(1); context.mockTokenBlacklistService.addToBlacklist,
).toHaveBeenCalledWith('valid.token');
expect(
context.mockTokenBlacklistService.addToBlacklist,
).toHaveBeenCalledTimes(1);
}); });
it('should handle race condition when token becomes blacklisted between check and add', async () => { it('should handle race condition when token becomes blacklisted between check and add', async () => {
@@ -248,13 +280,17 @@ describe('AuthService - logout', () => {
*/ */
context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(false); context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(false);
context.mockTokenBlacklistService.addToBlacklist.mockRejectedValue( context.mockTokenBlacklistService.addToBlacklist.mockRejectedValue(
new Error('Token já está na blacklist') new Error('Token já está na blacklist'),
); );
await context.service.logout('token.with.race.condition'); await context.service.logout('token.with.race.condition');
expect(context.mockTokenBlacklistService.isBlacklisted).toHaveBeenCalledWith('token.with.race.condition'); expect(
expect(context.mockTokenBlacklistService.addToBlacklist).toHaveBeenCalledWith('token.with.race.condition'); context.mockTokenBlacklistService.isBlacklisted,
).toHaveBeenCalledWith('token.with.race.condition');
expect(
context.mockTokenBlacklistService.addToBlacklist,
).toHaveBeenCalledWith('token.with.race.condition');
}); });
it('should throw error if addToBlacklist fails with non-blacklist error', async () => { it('should throw error if addToBlacklist fails with non-blacklist error', async () => {
@@ -265,15 +301,21 @@ describe('AuthService - logout', () => {
*/ */
context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(false); context.mockTokenBlacklistService.isBlacklisted.mockResolvedValue(false);
context.mockTokenBlacklistService.addToBlacklist.mockRejectedValue( context.mockTokenBlacklistService.addToBlacklist.mockRejectedValue(
new Error('Erro de conexão com Redis') new Error('Erro de conexão com Redis'),
); );
await expect( await expect(
context.service.logout('token.with.blacklist.error') context.service.logout('token.with.blacklist.error'),
).rejects.toThrow('Falha ao adicionar token à blacklist: Erro de conexão com Redis'); ).rejects.toThrow(
'Falha ao adicionar token à blacklist: Erro de conexão com Redis',
);
expect(context.mockTokenBlacklistService.isBlacklisted).toHaveBeenCalledWith('token.with.blacklist.error'); expect(
expect(context.mockTokenBlacklistService.addToBlacklist).toHaveBeenCalledWith('token.with.blacklist.error'); context.mockTokenBlacklistService.isBlacklisted,
).toHaveBeenCalledWith('token.with.blacklist.error');
expect(
context.mockTokenBlacklistService.addToBlacklist,
).toHaveBeenCalledWith('token.with.blacklist.error');
}); });
it('should verify isBlacklisted is called before addToBlacklist', async () => { it('should verify isBlacklisted is called before addToBlacklist', async () => {
@@ -286,11 +328,14 @@ describe('AuthService - logout', () => {
await context.service.logout('valid.token'); await context.service.logout('valid.token');
const isBlacklistedCallOrder = context.mockTokenBlacklistService.isBlacklisted.mock.invocationCallOrder[0]; const isBlacklistedCallOrder =
const addToBlacklistCallOrder = context.mockTokenBlacklistService.addToBlacklist.mock.invocationCallOrder[0]; context.mockTokenBlacklistService.isBlacklisted.mock
.invocationCallOrder[0];
const addToBlacklistCallOrder =
context.mockTokenBlacklistService.addToBlacklist.mock
.invocationCallOrder[0];
expect(isBlacklistedCallOrder).toBeLessThan(addToBlacklistCallOrder); expect(isBlacklistedCallOrder).toBeLessThan(addToBlacklistCallOrder);
}); });
}); });
}); });

View File

@@ -19,7 +19,9 @@ describe('AuthService - refreshAccessToken', () => {
situacao: 'A', situacao: 'A',
dataDesligamento: null, dataDesligamento: null,
}); });
context.mockSessionManagementService.isSessionActive.mockResolvedValue(true); context.mockSessionManagementService.isSessionActive.mockResolvedValue(
true,
);
}); });
afterEach(() => { afterEach(() => {
@@ -29,7 +31,7 @@ describe('AuthService - refreshAccessToken', () => {
describe('refreshAccessToken - Tests that expose problems', () => { describe('refreshAccessToken - Tests that expose problems', () => {
/** /**
* NOTA: Estes testes identificam problemas no método refreshAccessToken. * NOTA: Estes testes identificam problemas no método refreshAccessToken.
* *
* PROBLEMAS IDENTIFICADOS: * PROBLEMAS IDENTIFICADOS:
* 1. Não valida refresh token antes de processar * 1. Não valida refresh token antes de processar
* 2. Não valida dados retornados pelo refresh token service * 2. Não valida dados retornados pelo refresh token service
@@ -40,35 +42,43 @@ describe('AuthService - refreshAccessToken', () => {
*/ */
it('should reject empty refresh token', async () => { it('should reject empty refresh token', async () => {
await expect( await expect(context.service.refreshAccessToken('')).rejects.toThrow(
context.service.refreshAccessToken('') 'Refresh token não pode estar vazio',
).rejects.toThrow('Refresh token não pode estar vazio'); );
expect(context.mockRefreshTokenService.validateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.validateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should reject null refresh token', async () => { it('should reject null refresh token', async () => {
await expect( await expect(
context.service.refreshAccessToken(null as any) context.service.refreshAccessToken(null as any),
).rejects.toThrow('Refresh token não pode estar vazio'); ).rejects.toThrow('Refresh token não pode estar vazio');
expect(context.mockRefreshTokenService.validateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.validateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should reject undefined refresh token', async () => { it('should reject undefined refresh token', async () => {
await expect( await expect(
context.service.refreshAccessToken(undefined as any) context.service.refreshAccessToken(undefined as any),
).rejects.toThrow('Refresh token não pode estar vazio'); ).rejects.toThrow('Refresh token não pode estar vazio');
expect(context.mockRefreshTokenService.validateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.validateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should reject whitespace-only refresh token', async () => { it('should reject whitespace-only refresh token', async () => {
await expect( await expect(context.service.refreshAccessToken(' ')).rejects.toThrow(
context.service.refreshAccessToken(' ') 'Refresh token não pode estar vazio',
).rejects.toThrow('Refresh token não pode estar vazio'); );
expect(context.mockRefreshTokenService.validateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.validateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should validate tokenData has required id field', async () => { it('should validate tokenData has required id field', async () => {
@@ -77,15 +87,17 @@ describe('AuthService - refreshAccessToken', () => {
} as any); } as any);
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Dados do refresh token inválidos'); ).rejects.toThrow('Dados do refresh token inválidos');
}); });
it('should validate tokenData is not null', async () => { it('should validate tokenData is not null', async () => {
context.mockRefreshTokenService.validateRefreshToken.mockResolvedValue(null); context.mockRefreshTokenService.validateRefreshToken.mockResolvedValue(
null,
);
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Dados do refresh token inválidos'); ).rejects.toThrow('Dados do refresh token inválidos');
}); });
@@ -101,7 +113,7 @@ describe('AuthService - refreshAccessToken', () => {
}); });
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Dados do usuário incompletos'); ).rejects.toThrow('Dados do usuário incompletos');
}); });
@@ -117,7 +129,7 @@ describe('AuthService - refreshAccessToken', () => {
}); });
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Dados do usuário incompletos'); ).rejects.toThrow('Dados do usuário incompletos');
}); });
@@ -133,7 +145,7 @@ describe('AuthService - refreshAccessToken', () => {
}); });
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Dados do usuário incompletos'); ).rejects.toThrow('Dados do usuário incompletos');
}); });
@@ -141,7 +153,7 @@ describe('AuthService - refreshAccessToken', () => {
context.mockJwtService.sign.mockReturnValue(''); context.mockJwtService.sign.mockReturnValue('');
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Falha ao gerar novo token de acesso'); ).rejects.toThrow('Falha ao gerar novo token de acesso');
}); });
@@ -149,7 +161,7 @@ describe('AuthService - refreshAccessToken', () => {
context.mockJwtService.sign.mockReturnValue(null as any); context.mockJwtService.sign.mockReturnValue(null as any);
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Falha ao gerar novo token de acesso'); ).rejects.toThrow('Falha ao gerar novo token de acesso');
}); });
@@ -159,10 +171,12 @@ describe('AuthService - refreshAccessToken', () => {
sessionId: 'expired-session', sessionId: 'expired-session',
}); });
context.mockSessionManagementService.isSessionActive = jest.fn().mockResolvedValue(false); context.mockSessionManagementService.isSessionActive = jest
.fn()
.mockResolvedValue(false);
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Sessão não está mais ativa'); ).rejects.toThrow('Sessão não está mais ativa');
}); });
@@ -178,7 +192,7 @@ describe('AuthService - refreshAccessToken', () => {
}); });
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('ID de vendedor inválido'); ).rejects.toThrow('ID de vendedor inválido');
}); });
@@ -186,24 +200,30 @@ describe('AuthService - refreshAccessToken', () => {
const hugeToken = 'a'.repeat(100000); const hugeToken = 'a'.repeat(100000);
await expect( await expect(
context.service.refreshAccessToken(hugeToken) context.service.refreshAccessToken(hugeToken),
).rejects.toThrow('Refresh token muito longo'); ).rejects.toThrow('Refresh token muito longo');
expect(context.mockRefreshTokenService.validateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.validateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should sanitize refresh token input', async () => { it('should sanitize refresh token input', async () => {
const maliciousToken = "'; DROP TABLE users; --"; const maliciousToken = "'; DROP TABLE users; --";
await expect( await expect(
context.service.refreshAccessToken(maliciousToken) context.service.refreshAccessToken(maliciousToken),
).rejects.toThrow('Formato de refresh token inválido'); ).rejects.toThrow('Formato de refresh token inválido');
expect(context.mockRefreshTokenService.validateRefreshToken).not.toHaveBeenCalled(); expect(
context.mockRefreshTokenService.validateRefreshToken,
).not.toHaveBeenCalled();
}); });
it('should include only required fields in response', async () => { it('should include only required fields in response', async () => {
const result = await context.service.refreshAccessToken('valid.refresh.token'); const result = await context.service.refreshAccessToken(
'valid.refresh.token',
);
expect(result).toHaveProperty('accessToken'); expect(result).toHaveProperty('accessToken');
expect(result).toHaveProperty('expiresIn'); expect(result).toHaveProperty('expiresIn');
@@ -213,7 +233,9 @@ describe('AuthService - refreshAccessToken', () => {
}); });
it('should validate expiresIn is correct', async () => { it('should validate expiresIn is correct', async () => {
const result = await context.service.refreshAccessToken('valid.refresh.token'); const result = await context.service.refreshAccessToken(
'valid.refresh.token',
);
expect(result.expiresIn).toBe(28800); expect(result.expiresIn).toBe(28800);
expect(result.expiresIn).toBeGreaterThan(0); expect(result.expiresIn).toBeGreaterThan(0);
@@ -231,7 +253,7 @@ describe('AuthService - refreshAccessToken', () => {
}); });
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow(); ).rejects.toThrow();
}); });
@@ -244,7 +266,7 @@ describe('AuthService - refreshAccessToken', () => {
const results = await Promise.all(promises); const results = await Promise.all(promises);
results.forEach(result => { results.forEach((result) => {
expect(result).toHaveProperty('accessToken'); expect(result).toHaveProperty('accessToken');
expect(result.accessToken).toBeTruthy(); expect(result.accessToken).toBeTruthy();
}); });
@@ -262,9 +284,8 @@ describe('AuthService - refreshAccessToken', () => {
}); });
await expect( await expect(
context.service.refreshAccessToken('valid.refresh.token') context.service.refreshAccessToken('valid.refresh.token'),
).rejects.toThrow('Usuário inválido ou inativo'); ).rejects.toThrow('Usuário inválido ou inativo');
}); });
}); });
}); });

View File

@@ -24,14 +24,17 @@ import { RateLimitingGuard } from '../guards/rate-limiting.guard';
import { RateLimitingService } from '../services/rate-limiting.service'; import { RateLimitingService } from '../services/rate-limiting.service';
import { RefreshTokenService } from '../services/refresh-token.service'; import { RefreshTokenService } from '../services/refresh-token.service';
import { SessionManagementService } from '../services/session-management.service'; import { SessionManagementService } from '../services/session-management.service';
import { RefreshTokenDto, RefreshTokenResponseDto } from './dto/refresh-token.dto'; import {
RefreshTokenDto,
RefreshTokenResponseDto,
} from './dto/refresh-token.dto';
import { SessionsResponseDto } from './dto/session.dto'; import { SessionsResponseDto } from './dto/session.dto';
import { LoginAuditService } from '../services/login-audit.service'; import { LoginAuditService } from '../services/login-audit.service';
import { import {
LoginAuditFiltersDto, LoginAuditFiltersDto,
LoginAuditResponseDto, LoginAuditResponseDto,
LoginStatsDto, LoginStatsDto,
LoginStatsFiltersDto LoginStatsFiltersDto,
} from './dto/login-audit.dto'; } from './dto/login-audit.dto';
import { import {
ApiTags, ApiTags,
@@ -66,9 +69,12 @@ export class AuthController {
}) })
@ApiUnauthorizedResponse({ description: 'Usuário ou senha inválidos' }) @ApiUnauthorizedResponse({ description: 'Usuário ou senha inválidos' })
@ApiTooManyRequestsResponse({ description: 'Muitas tentativas de login' }) @ApiTooManyRequestsResponse({ description: 'Muitas tentativas de login' })
async login(@Body() dto: LoginDto, @Request() req): Promise<LoginResponseDto> { async login(
@Body() dto: LoginDto,
@Request() req,
): Promise<LoginResponseDto> {
const ip = this.getClientIp(req); const ip = this.getClientIp(req);
const command = new AuthenticateUserCommand(dto.username, dto.password); const command = new AuthenticateUserCommand(dto.username, dto.password);
const result = await this.commandBus.execute(command); const result = await this.commandBus.execute(command);
@@ -76,7 +82,7 @@ export class AuthController {
if (!result.success) { if (!result.success) {
await this.rateLimitingService.recordAttempt(ip, false); await this.rateLimitingService.recordAttempt(ip, false);
await this.loginAuditService.logLoginAttempt({ await this.loginAuditService.logLoginAttempt({
username: dto.username, username: dto.username,
ipAddress: ip, ipAddress: ip,
@@ -84,7 +90,7 @@ export class AuthController {
success: false, success: false,
failureReason: result.error, failureReason: result.error,
}); });
throw new HttpException( throw new HttpException(
new ResultModel(false, result.error, null, result.error), new ResultModel(false, result.error, null, result.error),
HttpStatus.UNAUTHORIZED, HttpStatus.UNAUTHORIZED,
@@ -94,19 +100,23 @@ export class AuthController {
await this.rateLimitingService.recordAttempt(ip, true); await this.rateLimitingService.recordAttempt(ip, true);
const user = result.data; const user = result.data;
/** /**
* Verifica se o usuário já possui uma sessão ativa * Verifica se o usuário já possui uma sessão ativa
*/ */
const existingSession = await this.sessionManagementService.hasActiveSession(user.id); const existingSession =
await this.sessionManagementService.hasActiveSession(user.id);
if (existingSession) { if (existingSession) {
/** /**
* Encerra a sessão existente antes de criar uma nova * Encerra a sessão existente antes de criar uma nova
*/ */
await this.sessionManagementService.terminateSession(user.id, existingSession.sessionId); await this.sessionManagementService.terminateSession(
user.id,
existingSession.sessionId,
);
} }
const session = await this.sessionManagementService.createSession( const session = await this.sessionManagementService.createSession(
user.id, user.id,
ip, ip,
@@ -161,7 +171,6 @@ export class AuthController {
); );
} }
@Post('logout') @Post('logout')
@UseGuards(JwtAuthGuard) @UseGuards(JwtAuthGuard)
@ApiBearerAuth() @ApiBearerAuth()
@@ -170,10 +179,15 @@ export class AuthController {
@ApiUnauthorizedResponse({ description: 'Token inválido ou expirado' }) @ApiUnauthorizedResponse({ description: 'Token inválido ou expirado' })
async logout(@Request() req): Promise<{ message: string }> { async logout(@Request() req): Promise<{ message: string }> {
const token = req.headers.authorization?.replace('Bearer ', ''); const token = req.headers.authorization?.replace('Bearer ', '');
if (!token) { if (!token) {
throw new HttpException( throw new HttpException(
new ResultModel(false, 'Token não fornecido', null, 'Token não fornecido'), new ResultModel(
false,
'Token não fornecido',
null,
'Token não fornecido',
),
HttpStatus.UNAUTHORIZED, HttpStatus.UNAUTHORIZED,
); );
} }
@@ -192,8 +206,12 @@ export class AuthController {
description: 'Token renovado com sucesso', description: 'Token renovado com sucesso',
type: RefreshTokenResponseDto, type: RefreshTokenResponseDto,
}) })
@ApiUnauthorizedResponse({ description: 'Refresh token inválido ou expirado' }) @ApiUnauthorizedResponse({
async refreshToken(@Body() dto: RefreshTokenDto): Promise<RefreshTokenResponseDto> { description: 'Refresh token inválido ou expirado',
})
async refreshToken(
@Body() dto: RefreshTokenDto,
): Promise<RefreshTokenResponseDto> {
const result = await this.authService.refreshAccessToken(dto.refreshToken); const result = await this.authService.refreshAccessToken(dto.refreshToken);
return result; return result;
} }
@@ -210,15 +228,20 @@ export class AuthController {
async getSessions(@Request() req): Promise<SessionsResponseDto> { async getSessions(@Request() req): Promise<SessionsResponseDto> {
const userId = req.user.id; const userId = req.user.id;
const currentSessionId = req.user.sessionId; const currentSessionId = req.user.sessionId;
const sessions = await this.sessionManagementService.getActiveSessions(userId, currentSessionId); const sessions = await this.sessionManagementService.getActiveSessions(
userId,
currentSessionId,
);
return { return {
sessions: sessions.map(session => ({ sessions: sessions.map((session) => ({
sessionId: session.sessionId, sessionId: session.sessionId,
ipAddress: session.ipAddress, ipAddress: session.ipAddress,
userAgent: session.userAgent, userAgent: session.userAgent,
createdAt: DateUtil.toBrazilISOString(new Date(session.createdAt)), createdAt: DateUtil.toBrazilISOString(new Date(session.createdAt)),
lastActivity: DateUtil.toBrazilISOString(new Date(session.lastActivity)), lastActivity: DateUtil.toBrazilISOString(
new Date(session.lastActivity),
),
isCurrent: session.sessionId === currentSessionId, isCurrent: session.sessionId === currentSessionId,
})), })),
total: sessions.length, total: sessions.length,
@@ -238,7 +261,7 @@ export class AuthController {
): Promise<{ message: string }> { ): Promise<{ message: string }> {
const userId = req.user.id; const userId = req.user.id;
await this.sessionManagementService.terminateSession(userId, sessionId); await this.sessionManagementService.terminateSession(userId, sessionId);
return { return {
message: 'Sessão encerrada com sucesso', message: 'Sessão encerrada com sucesso',
}; };
@@ -253,7 +276,7 @@ export class AuthController {
async terminateAllSessions(@Request() req): Promise<{ message: string }> { async terminateAllSessions(@Request() req): Promise<{ message: string }> {
const userId = req.user.id; const userId = req.user.id;
await this.sessionManagementService.terminateAllSessions(userId); await this.sessionManagementService.terminateAllSessions(userId);
return { return {
message: 'Todas as sessões foram encerradas com sucesso', message: 'Todas as sessões foram encerradas com sucesso',
}; };
@@ -273,7 +296,7 @@ export class AuthController {
@Request() req, @Request() req,
): Promise<LoginAuditResponseDto> { ): Promise<LoginAuditResponseDto> {
const userId = req.user.id; const userId = req.user.id;
const auditFilters = { const auditFilters = {
...filters, ...filters,
userId: filters.userId || userId, userId: filters.userId || userId,
@@ -282,9 +305,9 @@ export class AuthController {
}; };
const logs = await this.loginAuditService.getLoginLogs(auditFilters); const logs = await this.loginAuditService.getLoginLogs(auditFilters);
return { return {
logs: logs.map(log => ({ logs: logs.map((log) => ({
...log, ...log,
timestamp: DateUtil.toBrazilISOString(log.timestamp), timestamp: DateUtil.toBrazilISOString(log.timestamp),
})), })),
@@ -309,12 +332,12 @@ export class AuthController {
): Promise<LoginStatsDto> { ): Promise<LoginStatsDto> {
const userId = req.user.id; const userId = req.user.id;
const days = filters.days || 7; const days = filters.days || 7;
const stats = await this.loginAuditService.getLoginStats( const stats = await this.loginAuditService.getLoginStats(
filters.userId || userId, filters.userId || userId,
days, days,
); );
return stats; return stats;
} }
@@ -333,13 +356,12 @@ export class AuthController {
ipAddress: { type: 'string' }, ipAddress: { type: 'string' },
userAgent: { type: 'string' }, userAgent: { type: 'string' },
createdAt: { type: 'string' }, createdAt: { type: 'string' },
lastActivity: { type: 'string' } lastActivity: { type: 'string' },
} },
} },
} },
} },
}) })
@Get('session/status') @Get('session/status')
async checkSessionStatus(@Query('username') username: string): Promise<{ async checkSessionStatus(@Query('username') username: string): Promise<{
hasActiveSession: boolean; hasActiveSession: boolean;
@@ -353,7 +375,12 @@ export class AuthController {
}> { }> {
if (!username) { if (!username) {
throw new HttpException( throw new HttpException(
new ResultModel(false, 'Username é obrigatório', null, 'Username é obrigatório'), new ResultModel(
false,
'Username é obrigatório',
null,
'Username é obrigatório',
),
HttpStatus.BAD_REQUEST, HttpStatus.BAD_REQUEST,
); );
} }
@@ -362,15 +389,17 @@ export class AuthController {
* Busca o usuário pelo username para obter o ID * Busca o usuário pelo username para obter o ID
*/ */
const user = await this.authService.findUserByUsername(username); const user = await this.authService.findUserByUsername(username);
if (!user) { if (!user) {
return { return {
hasActiveSession: false, hasActiveSession: false,
}; };
} }
const activeSession = await this.sessionManagementService.hasActiveSession(user.id); const activeSession = await this.sessionManagementService.hasActiveSession(
user.id,
);
if (!activeSession) { if (!activeSession) {
return { return {
hasActiveSession: false, hasActiveSession: false,
@@ -383,8 +412,12 @@ export class AuthController {
sessionId: activeSession.sessionId, sessionId: activeSession.sessionId,
ipAddress: activeSession.ipAddress, ipAddress: activeSession.ipAddress,
userAgent: activeSession.userAgent, userAgent: activeSession.userAgent,
createdAt: DateUtil.toBrazilISOString(new Date(activeSession.createdAt)), createdAt: DateUtil.toBrazilISOString(
lastActivity: DateUtil.toBrazilISOString(new Date(activeSession.lastActivity)), new Date(activeSession.createdAt),
),
lastActivity: DateUtil.toBrazilISOString(
new Date(activeSession.lastActivity),
),
}, },
}; };
} }

View File

@@ -35,14 +35,14 @@ import { LoginAuditService } from '../services/login-audit.service';
], ],
controllers: [AuthController], controllers: [AuthController],
providers: [ providers: [
AuthService, AuthService,
JwtStrategy, JwtStrategy,
TokenBlacklistService, TokenBlacklistService,
RateLimitingService, RateLimitingService,
RefreshTokenService, RefreshTokenService,
SessionManagementService, SessionManagementService,
LoginAuditService, LoginAuditService,
AuthenticateUserHandler AuthenticateUserHandler,
], ],
exports: [AuthService], exports: [AuthService],
}) })

View File

@@ -1,4 +1,8 @@
import { Injectable, UnauthorizedException, BadRequestException } from '@nestjs/common'; import {
Injectable,
UnauthorizedException,
BadRequestException,
} from '@nestjs/common';
import { JwtService, JwtSignOptions } from '@nestjs/jwt'; import { JwtService, JwtSignOptions } from '@nestjs/jwt';
import { UsersService } from '../users/users.service'; import { UsersService } from '../users/users.service';
import { JwtPayload } from '../models/jwt-payload.model'; import { JwtPayload } from '../models/jwt-payload.model';
@@ -7,7 +11,6 @@ import { TokenBlacklistService } from '../services/token-blacklist.service';
import { RefreshTokenService } from '../services/refresh-token.service'; import { RefreshTokenService } from '../services/refresh-token.service';
import { SessionManagementService } from '../services/session-management.service'; import { SessionManagementService } from '../services/session-management.service';
@Injectable() @Injectable()
export class AuthService { export class AuthService {
constructor( constructor(
@@ -23,7 +26,14 @@ export class AuthService {
* Cria um token JWT com validação de todos os parâmetros de entrada * Cria um token JWT com validação de todos os parâmetros de entrada
* @throws BadRequestException quando os parâmetros são inválidos * @throws BadRequestException quando os parâmetros são inválidos
*/ */
async createToken(id: number, sellerId: number, username: string, email: string, storeId: string, sessionId?: string) { async createToken(
id: number,
sellerId: number | null,
username: string,
email: string,
storeId: string,
sessionId?: string,
) {
this.validateTokenParameters(id, sellerId, username, email, storeId); this.validateTokenParameters(id, sellerId, username, email, storeId);
const user: JwtPayload = { const user: JwtPayload = {
@@ -42,12 +52,22 @@ export class AuthService {
* Valida os parâmetros de entrada para criação de token * Valida os parâmetros de entrada para criação de token
* @private * @private
*/ */
private validateTokenParameters(id: number, sellerId: number, username: string, email: string, storeId: string): void { private validateTokenParameters(
id: number,
sellerId: number | null,
username: string,
email: string,
storeId: string,
): void {
if (!id || id <= 0) { if (!id || id <= 0) {
throw new BadRequestException('ID de usuário inválido'); throw new BadRequestException('ID de usuário inválido');
} }
if (sellerId === null || sellerId === undefined || sellerId < 0) { if (
sellerId !== null &&
sellerId !== undefined &&
sellerId < 0
) {
throw new BadRequestException('ID de vendedor inválido'); throw new BadRequestException('ID de vendedor inválido');
} }
@@ -64,7 +84,9 @@ export class AuthService {
} }
if (/['";\\]/.test(username)) { if (/['";\\]/.test(username)) {
throw new BadRequestException('Nome de usuário contém caracteres inválidos'); throw new BadRequestException(
'Nome de usuário contém caracteres inválidos',
);
} }
if (!email || typeof email !== 'string' || !email.trim()) { if (!email || typeof email !== 'string' || !email.trim()) {
@@ -77,7 +99,7 @@ export class AuthService {
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/; const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
const multipleAtSymbols = (email.match(/@/g) || []).length > 1; const multipleAtSymbols = (email.match(/@/g) || []).length > 1;
if (!emailRegex.test(email) || multipleAtSymbols) { if (!emailRegex.test(email) || multipleAtSymbols) {
throw new BadRequestException('Formato de email inválido'); throw new BadRequestException('Formato de email inválido');
} }
@@ -92,16 +114,41 @@ export class AuthService {
* @throws BadRequestException quando os parâmetros são inválidos * @throws BadRequestException quando os parâmetros são inválidos
* @throws Error quando os tokens gerados são inválidos * @throws Error quando os tokens gerados são inválidos
*/ */
async createTokenPair(id: number, sellerId: number, username: string, email: string, storeId: string, sessionId?: string) { async createTokenPair(
const accessToken = await this.createToken(id, sellerId, username, email, storeId, sessionId); id: number,
sellerId: number | null,
if (!accessToken || typeof accessToken !== 'string' || !accessToken.trim()) { username: string,
email: string,
storeId: string,
sessionId?: string,
) {
const accessToken = await this.createToken(
id,
sellerId,
username,
email,
storeId,
sessionId,
);
if (
!accessToken ||
typeof accessToken !== 'string' ||
!accessToken.trim()
) {
throw new Error('Token de acesso inválido gerado'); throw new Error('Token de acesso inválido gerado');
} }
const refreshToken = await this.refreshTokenService.generateRefreshToken(id, sessionId); const refreshToken = await this.refreshTokenService.generateRefreshToken(
id,
if (!refreshToken || typeof refreshToken !== 'string' || !refreshToken.trim()) { sessionId,
);
if (
!refreshToken ||
typeof refreshToken !== 'string' ||
!refreshToken.trim()
) {
throw new Error('Refresh token inválido gerado'); throw new Error('Refresh token inválido gerado');
} }
@@ -121,8 +168,10 @@ export class AuthService {
async refreshAccessToken(refreshToken: string) { async refreshAccessToken(refreshToken: string) {
this.validateRefreshTokenInput(refreshToken); this.validateRefreshTokenInput(refreshToken);
const tokenData = await this.refreshTokenService.validateRefreshToken(refreshToken); const tokenData = await this.refreshTokenService.validateRefreshToken(
refreshToken,
);
if (!tokenData || !tokenData.id) { if (!tokenData || !tokenData.id) {
throw new BadRequestException('Dados do refresh token inválidos'); throw new BadRequestException('Dados do refresh token inválidos');
} }
@@ -135,10 +184,11 @@ export class AuthService {
this.validateUserDataForToken(user); this.validateUserDataForToken(user);
if (tokenData.sessionId) { if (tokenData.sessionId) {
const isSessionActive = await this.sessionManagementService.isSessionActive( const isSessionActive =
user.id, await this.sessionManagementService.isSessionActive(
tokenData.sessionId user.id,
); tokenData.sessionId,
);
if (!isSessionActive) { if (!isSessionActive) {
throw new UnauthorizedException('Sessão não está mais ativa'); throw new UnauthorizedException('Sessão não está mais ativa');
} }
@@ -150,10 +200,14 @@ export class AuthService {
user.name, user.name,
user.email, user.email,
user.storeId, user.storeId,
tokenData.sessionId tokenData.sessionId,
); );
if (!newAccessToken || typeof newAccessToken !== 'string' || !newAccessToken.trim()) { if (
!newAccessToken ||
typeof newAccessToken !== 'string' ||
!newAccessToken.trim()
) {
throw new Error('Falha ao gerar novo token de acesso'); throw new Error('Falha ao gerar novo token de acesso');
} }
@@ -168,7 +222,11 @@ export class AuthService {
* @private * @private
*/ */
private validateRefreshTokenInput(refreshToken: string): void { private validateRefreshTokenInput(refreshToken: string): void {
if (!refreshToken || typeof refreshToken !== 'string' || !refreshToken.trim()) { if (
!refreshToken ||
typeof refreshToken !== 'string' ||
!refreshToken.trim()
) {
throw new BadRequestException('Refresh token não pode estar vazio'); throw new BadRequestException('Refresh token não pode estar vazio');
} }
@@ -187,18 +245,32 @@ export class AuthService {
*/ */
private validateUserDataForToken(user: any): void { private validateUserDataForToken(user: any): void {
if (!user.name || typeof user.name !== 'string' || !user.name.trim()) { if (!user.name || typeof user.name !== 'string' || !user.name.trim()) {
throw new BadRequestException('Dados do usuário incompletos: nome não encontrado'); throw new BadRequestException(
'Dados do usuário incompletos: nome não encontrado',
);
} }
if (!user.email || typeof user.email !== 'string' || !user.email.trim()) { if (!user.email || typeof user.email !== 'string' || !user.email.trim()) {
throw new BadRequestException('Dados do usuário incompletos: email não encontrado'); throw new BadRequestException(
'Dados do usuário incompletos: email não encontrado',
);
} }
if (!user.storeId || typeof user.storeId !== 'string' || !user.storeId.trim()) { if (
throw new BadRequestException('Dados do usuário incompletos: storeId não encontrado'); !user.storeId ||
typeof user.storeId !== 'string' ||
!user.storeId.trim()
) {
throw new BadRequestException(
'Dados do usuário incompletos: storeId não encontrado',
);
} }
if (user.sellerId !== null && user.sellerId !== undefined && user.sellerId < 0) { if (
user.sellerId !== null &&
user.sellerId !== undefined &&
user.sellerId < 0
) {
throw new BadRequestException('ID de vendedor inválido'); throw new BadRequestException('ID de vendedor inválido');
} }
} }
@@ -228,11 +300,15 @@ export class AuthService {
try { try {
decoded = this.jwtService.decode(token) as JwtPayload; decoded = this.jwtService.decode(token) as JwtPayload;
} catch (error) { } catch (error) {
throw new BadRequestException('Token inválido ou não pode ser decodificado'); throw new BadRequestException(
'Token inválido ou não pode ser decodificado',
);
} }
if (!decoded || !decoded.id) { if (!decoded || !decoded.id) {
throw new BadRequestException('Token inválido ou não pode ser decodificado'); throw new BadRequestException(
'Token inválido ou não pode ser decodificado',
);
} }
if (decoded.id <= 0) { if (decoded.id <= 0) {
@@ -241,25 +317,34 @@ export class AuthService {
if (decoded.sessionId && decoded.id && decoded.sessionId.trim()) { if (decoded.sessionId && decoded.id && decoded.sessionId.trim()) {
try { try {
await this.sessionManagementService.terminateSession(decoded.id, decoded.sessionId); await this.sessionManagementService.terminateSession(
decoded.id,
decoded.sessionId,
);
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error); const errorMessage =
error instanceof Error ? error.message : String(error);
if (errorMessage.includes('Sessão não encontrada')) { if (errorMessage.includes('Sessão não encontrada')) {
throw new Error('Sessão não encontrada'); throw new Error('Sessão não encontrada');
} }
} }
} }
const isAlreadyBlacklisted = await this.tokenBlacklistService.isBlacklisted(token); const isAlreadyBlacklisted = await this.tokenBlacklistService.isBlacklisted(
token,
);
if (!isAlreadyBlacklisted) { if (!isAlreadyBlacklisted) {
try { try {
await this.tokenBlacklistService.addToBlacklist(token); await this.tokenBlacklistService.addToBlacklist(token);
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error); const errorMessage =
error instanceof Error ? error.message : String(error);
if (errorMessage.includes('já está na blacklist')) { if (errorMessage.includes('já está na blacklist')) {
return; return;
} }
throw new Error(`Falha ao adicionar token à blacklist: ${errorMessage}`); throw new Error(
`Falha ao adicionar token à blacklist: ${errorMessage}`,
);
} }
} }
} }
@@ -289,4 +374,4 @@ export class AuthService {
async findUserByUsername(username: string) { async findUserByUsername(username: string) {
return this.userRepository.findByUsername(username); return this.userRepository.findByUsername(username);
} }
} }

View File

@@ -1,7 +1,6 @@
export class AuthenticateUserCommand { export class AuthenticateUserCommand {
constructor( constructor(
public readonly username: string, public readonly username: string,
public readonly password: string, public readonly password: string,
) {} ) {}
} }

View File

@@ -7,13 +7,18 @@ import { UserModel } from 'src/core/models/user.model';
@CommandHandler(AuthenticateUserCommand) @CommandHandler(AuthenticateUserCommand)
@Injectable() @Injectable()
export class AuthenticateUserHandler implements ICommandHandler<AuthenticateUserCommand> { export class AuthenticateUserHandler
implements ICommandHandler<AuthenticateUserCommand>
{
constructor(private readonly userRepository: UserRepository) {} constructor(private readonly userRepository: UserRepository) {}
async execute(command: AuthenticateUserCommand): Promise<Result<UserModel>> { async execute(command: AuthenticateUserCommand): Promise<Result<UserModel>> {
const { username, password } = command; const { username, password } = command;
const user = await this.userRepository.findByUsernameAndPassword(username, password); const user = await this.userRepository.findByUsernameAndPassword(
username,
password,
);
if (!user) { if (!user) {
return Result.fail('Usuário ou senha inválidos'); return Result.fail('Usuário ou senha inválidos');
@@ -30,7 +35,6 @@ export class AuthenticateUserHandler implements ICommandHandler<AuthenticateUser
if (user.situacao === 'B') { if (user.situacao === 'B') {
return Result.fail('Usuário bloqueado, login não permitido!'); return Result.fail('Usuário bloqueado, login não permitido!');
} }
return Result.ok(user); return Result.ok(user);
} }

View File

@@ -3,7 +3,7 @@ import { ApiProperty } from '@nestjs/swagger';
export class LoginResponseDto { export class LoginResponseDto {
@ApiProperty() id: number; @ApiProperty() id: number;
@ApiProperty() sellerId: number; @ApiProperty({ nullable: true }) sellerId: number | null;
@ApiProperty() name: string; @ApiProperty() name: string;
@ApiProperty() username: string; @ApiProperty() username: string;
@ApiProperty() storeId: string; @ApiProperty() storeId: string;

View File

@@ -1,5 +1,13 @@
import { ApiProperty } from '@nestjs/swagger'; import { ApiProperty } from '@nestjs/swagger';
import { IsOptional, IsNumber, IsString, IsBoolean, IsDateString, Min, Max } from 'class-validator'; import {
IsOptional,
IsNumber,
IsString,
IsBoolean,
IsDateString,
Min,
Max,
} from 'class-validator';
import { Type } from 'class-transformer'; import { Type } from 'class-transformer';
export class LoginAuditFiltersDto { export class LoginAuditFiltersDto {
@@ -19,7 +27,10 @@ export class LoginAuditFiltersDto {
@IsString() @IsString()
ipAddress?: string; ipAddress?: string;
@ApiProperty({ description: 'Filtrar apenas logins bem-sucedidos', required: false }) @ApiProperty({
description: 'Filtrar apenas logins bem-sucedidos',
required: false,
})
@IsOptional() @IsOptional()
@IsBoolean() @IsBoolean()
@Type(() => Boolean) @Type(() => Boolean)
@@ -35,7 +46,12 @@ export class LoginAuditFiltersDto {
@IsDateString() @IsDateString()
endDate?: string; endDate?: string;
@ApiProperty({ description: 'Número de registros por página', required: false, minimum: 1, maximum: 1000 }) @ApiProperty({
description: 'Número de registros por página',
required: false,
minimum: 1,
maximum: 1000,
})
@IsOptional() @IsOptional()
@IsNumber() @IsNumber()
@Type(() => Number) @Type(() => Number)
@@ -43,7 +59,11 @@ export class LoginAuditFiltersDto {
@Max(1000) @Max(1000)
limit?: number; limit?: number;
@ApiProperty({ description: 'Offset para paginação', required: false, minimum: 0 }) @ApiProperty({
description: 'Offset para paginação',
required: false,
minimum: 0,
})
@IsOptional() @IsOptional()
@IsNumber() @IsNumber()
@Type(() => Number) @Type(() => Number)
@@ -84,7 +104,10 @@ export class LoginAuditLogDto {
} }
export class LoginAuditResponseDto { export class LoginAuditResponseDto {
@ApiProperty({ description: 'Lista de logs de login', type: [LoginAuditLogDto] }) @ApiProperty({
description: 'Lista de logs de login',
type: [LoginAuditLogDto],
})
logs: LoginAuditLogDto[]; logs: LoginAuditLogDto[];
@ApiProperty({ description: 'Total de registros encontrados' }) @ApiProperty({ description: 'Total de registros encontrados' })
@@ -114,22 +137,30 @@ export class LoginStatsDto {
topIps: Array<{ ip: string; count: number }>; topIps: Array<{ ip: string; count: number }>;
@ApiProperty({ description: 'Estatísticas diárias' }) @ApiProperty({ description: 'Estatísticas diárias' })
dailyStats: Array<{ dailyStats: Array<{
date: string; date: string;
attempts: number; attempts: number;
successes: number; successes: number;
failures: number; failures: number;
}>; }>;
} }
export class LoginStatsFiltersDto { export class LoginStatsFiltersDto {
@ApiProperty({ description: 'ID do usuário para estatísticas', required: false }) @ApiProperty({
description: 'ID do usuário para estatísticas',
required: false,
})
@IsOptional() @IsOptional()
@IsNumber() @IsNumber()
@Type(() => Number) @Type(() => Number)
userId?: number; userId?: number;
@ApiProperty({ description: 'Número de dias para análise', required: false, minimum: 1, maximum: 365 }) @ApiProperty({
description: 'Número de dias para análise',
required: false,
minimum: 1,
maximum: 365,
})
@IsOptional() @IsOptional()
@IsNumber() @IsNumber()
@Type(() => Number) @Type(() => Number)

View File

@@ -50,7 +50,7 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
describe('canActivate', () => { describe('canActivate', () => {
/** /**
* NOTA: Estes testes identificam problemas no método canActivate. * NOTA: Estes testes identificam problemas no método canActivate.
* *
* PROBLEMAS IDENTIFICADOS: * PROBLEMAS IDENTIFICADOS:
* 1. Não valida se IP extraído é válido * 1. Não valida se IP extraído é válido
* 2. Não valida se rate limiting service retorna dados válidos * 2. Não valida se rate limiting service retorna dados válidos
@@ -196,7 +196,7 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
mockGetRequest.mockReturnValue(request); mockGetRequest.mockReturnValue(request);
mockRateLimitingService.isAllowed.mockRejectedValue( mockRateLimitingService.isAllowed.mockRejectedValue(
new Error('Erro de conexão com Redis') new Error('Erro de conexão com Redis'),
); );
try { try {
@@ -225,7 +225,7 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
mockGetRequest.mockReturnValue(request); mockGetRequest.mockReturnValue(request);
mockRateLimitingService.isAllowed.mockResolvedValue(false); mockRateLimitingService.isAllowed.mockResolvedValue(false);
mockRateLimitingService.getAttemptInfo.mockRejectedValue( mockRateLimitingService.getAttemptInfo.mockRejectedValue(
new Error('Erro ao buscar informações') new Error('Erro ao buscar informações'),
); );
try { try {
@@ -336,7 +336,9 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
const result = await guard.canActivate(mockExecutionContext); const result = await guard.canActivate(mockExecutionContext);
expect(result).toBe(true); expect(result).toBe(true);
expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith('192.168.1.1'); expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith(
'192.168.1.1',
);
}); });
it('should handle concurrent requests with same IP', async () => { it('should handle concurrent requests with same IP', async () => {
@@ -363,7 +365,7 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
const results = await Promise.all(promises); const results = await Promise.all(promises);
results.forEach(result => { results.forEach((result) => {
expect(result).toBe(true); expect(result).toBe(true);
}); });
}); });
@@ -394,7 +396,9 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
fail('Deveria ter lançado exceção'); fail('Deveria ter lançado exceção');
} catch (error) { } catch (error) {
expect(error).toBeInstanceOf(HttpException); expect(error).toBeInstanceOf(HttpException);
expect((error as HttpException).getStatus()).toBe(HttpStatus.TOO_MANY_REQUESTS); expect((error as HttpException).getStatus()).toBe(
HttpStatus.TOO_MANY_REQUESTS,
);
} }
}); });
@@ -419,7 +423,9 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
fail('Deveria ter lançado exceção'); fail('Deveria ter lançado exceção');
} catch (error) { } catch (error) {
const response = (error as HttpException).getResponse() as any; const response = (error as HttpException).getResponse() as any;
expect(response.error).toBe('Muitas tentativas de login. Tente novamente em alguns minutos.'); expect(response.error).toBe(
'Muitas tentativas de login. Tente novamente em alguns minutos.',
);
expect(response.success).toBe(false); expect(response.success).toBe(false);
} }
}); });
@@ -512,7 +518,9 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
const result = await guard.canActivate(mockExecutionContext); const result = await guard.canActivate(mockExecutionContext);
expect(result).toBe(true); expect(result).toBe(true);
expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith('2001:0db8:85a3:0000:0000:8a2e:0370:7334'); expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith(
'2001:0db8:85a3:0000:0000:8a2e:0370:7334',
);
}); });
it('should reject invalid IPv6 format', async () => { it('should reject invalid IPv6 format', async () => {
@@ -556,7 +564,9 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
await guard.canActivate(mockExecutionContext); await guard.canActivate(mockExecutionContext);
expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith('192.168.1.1'); expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith(
'192.168.1.1',
);
}); });
it('should fallback to connection.remoteAddress when x-forwarded-for is missing', async () => { it('should fallback to connection.remoteAddress when x-forwarded-for is missing', async () => {
@@ -572,7 +582,9 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
await guard.canActivate(mockExecutionContext); await guard.canActivate(mockExecutionContext);
expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith('10.0.0.1'); expect(mockRateLimitingService.isAllowed).toHaveBeenCalledWith(
'10.0.0.1',
);
}); });
it('should use default IP when all sources are missing', async () => { it('should use default IP when all sources are missing', async () => {
@@ -603,4 +615,3 @@ describe('RateLimitingGuard - Tests that expose problems', () => {
}); });
}); });
}); });

View File

@@ -1,4 +1,10 @@
import { Injectable, CanActivate, ExecutionContext, HttpException, HttpStatus } from '@nestjs/common'; import {
Injectable,
CanActivate,
ExecutionContext,
HttpException,
HttpStatus,
} from '@nestjs/common';
import { RateLimitingService } from '../services/rate-limiting.service'; import { RateLimitingService } from '../services/rate-limiting.service';
@Injectable() @Injectable()
@@ -19,7 +25,8 @@ export class RateLimitingGuard implements CanActivate {
try { try {
isAllowed = await this.rateLimitingService.isAllowed(ip); isAllowed = await this.rateLimitingService.isAllowed(ip);
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error); const errorMessage =
error instanceof Error ? error.message : String(error);
throw new HttpException( throw new HttpException(
{ {
success: false, success: false,
@@ -30,13 +37,14 @@ export class RateLimitingGuard implements CanActivate {
HttpStatus.INTERNAL_SERVER_ERROR, HttpStatus.INTERNAL_SERVER_ERROR,
); );
} }
if (!isAllowed) { if (!isAllowed) {
let attemptInfo; let attemptInfo;
try { try {
attemptInfo = await this.rateLimitingService.getAttemptInfo(ip); attemptInfo = await this.rateLimitingService.getAttemptInfo(ip);
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error); const errorMessage =
error instanceof Error ? error.message : String(error);
throw new HttpException( throw new HttpException(
{ {
success: false, success: false,
@@ -49,11 +57,12 @@ export class RateLimitingGuard implements CanActivate {
} }
this.validateAttemptInfo(attemptInfo); this.validateAttemptInfo(attemptInfo);
throw new HttpException( throw new HttpException(
{ {
success: false, success: false,
error: 'Muitas tentativas de login. Tente novamente em alguns minutos.', error:
'Muitas tentativas de login. Tente novamente em alguns minutos.',
data: null, data: null,
details: { details: {
attempts: attemptInfo.attempts, attempts: attemptInfo.attempts,
@@ -73,13 +82,16 @@ export class RateLimitingGuard implements CanActivate {
* @returns Endereço IP do cliente ou '127.0.0.1' se não encontrado * @returns Endereço IP do cliente ou '127.0.0.1' se não encontrado
*/ */
private getClientIp(request: any): string { private getClientIp(request: any): string {
const forwardedFor = request.headers['x-forwarded-for']?.split(',')[0]?.trim(); const forwardedFor = request.headers['x-forwarded-for']
?.split(',')[0]
?.trim();
const realIp = request.headers['x-real-ip']?.trim(); const realIp = request.headers['x-real-ip']?.trim();
const connectionIp = request.connection?.remoteAddress; const connectionIp = request.connection?.remoteAddress;
const socketIp = request.socket?.remoteAddress; const socketIp = request.socket?.remoteAddress;
const requestIp = request.ip; const requestIp = request.ip;
const rawIp = forwardedFor || realIp || connectionIp || socketIp || requestIp; const rawIp =
forwardedFor || realIp || connectionIp || socketIp || requestIp;
if (rawIp === null || rawIp === undefined) { if (rawIp === null || rawIp === undefined) {
return ''; return '';
@@ -90,7 +102,7 @@ export class RateLimitingGuard implements CanActivate {
} }
const trimmedIp = rawIp.trim(); const trimmedIp = rawIp.trim();
if (trimmedIp === '') { if (trimmedIp === '') {
return ''; return '';
} }
@@ -144,7 +156,11 @@ export class RateLimitingGuard implements CanActivate {
return; return;
} }
if (!ipv4Regex.test(ip) && !ipv6Regex.test(ip) && !ipv6CompressedRegex.test(ip)) { if (
!ipv4Regex.test(ip) &&
!ipv6Regex.test(ip) &&
!ipv6CompressedRegex.test(ip)
) {
if (!this.isValidIpv4(ip) && !this.isValidIpv6(ip)) { if (!this.isValidIpv4(ip) && !this.isValidIpv6(ip)) {
throw new HttpException( throw new HttpException(
{ {
@@ -166,7 +182,7 @@ export class RateLimitingGuard implements CanActivate {
const parts = ip.split('.'); const parts = ip.split('.');
if (parts.length !== 4) return false; if (parts.length !== 4) return false;
return parts.every(part => { return parts.every((part) => {
const num = parseInt(part, 10); const num = parseInt(part, 10);
return !isNaN(num) && num >= 0 && num <= 255; return !isNaN(num) && num >= 0 && num <= 255;
}); });
@@ -180,17 +196,17 @@ export class RateLimitingGuard implements CanActivate {
if (ip.includes('::')) { if (ip.includes('::')) {
const parts = ip.split('::'); const parts = ip.split('::');
if (parts.length > 2) return false; if (parts.length > 2) return false;
const leftParts = parts[0] ? parts[0].split(':') : []; const leftParts = parts[0] ? parts[0].split(':') : [];
const rightParts = parts[1] ? parts[1].split(':') : []; const rightParts = parts[1] ? parts[1].split(':') : [];
return (leftParts.length + rightParts.length) <= 8; return leftParts.length + rightParts.length <= 8;
} }
const parts = ip.split(':'); const parts = ip.split(':');
if (parts.length !== 8) return false; if (parts.length !== 8) return false;
return parts.every(part => { return parts.every((part) => {
if (!part) return false; if (!part) return false;
return /^[0-9a-fA-F]{1,4}$/.test(part); return /^[0-9a-fA-F]{1,4}$/.test(part);
}); });
@@ -223,8 +239,11 @@ export class RateLimitingGuard implements CanActivate {
); );
} }
if (attemptInfo.remainingTime !== undefined && if (
(typeof attemptInfo.remainingTime !== 'number' || attemptInfo.remainingTime < 0)) { attemptInfo.remainingTime !== undefined &&
(typeof attemptInfo.remainingTime !== 'number' ||
attemptInfo.remainingTime < 0)
) {
throw new HttpException( throw new HttpException(
{ {
success: false, success: false,

View File

@@ -1,11 +1,11 @@
/* eslint-disable prettier/prettier */ /* eslint-disable prettier/prettier */
export interface JwtPayload { export interface JwtPayload {
id: number; id: number;
sellerId: number; sellerId: number | null;
storeId: string; storeId: string;
username: string; username: string;
email: string; email: string;
exp?: number; // Timestamp de expiração do JWT exp?: number; // Timestamp de expiração do JWT
sessionId?: string; // ID da sessão atual sessionId?: string; // ID da sessão atual
} }

View File

@@ -1,16 +1,15 @@
export class Result<T> { export class Result<T> {
private constructor( private constructor(
public readonly success: boolean, public readonly success: boolean,
public readonly data?: T, public readonly data?: T,
public readonly error?: string, public readonly error?: string,
) {} ) {}
static ok<U>(data: U): Result<U> { static ok<U>(data: U): Result<U> {
return new Result<U>(true, data); return new Result<U>(true, data);
}
static fail<U>(message: string): Result<U> {
return new Result<U>(false, undefined, message);
}
} }
static fail<U>(message: string): Result<U> {
return new Result<U>(false, undefined, message);
}
}

View File

@@ -0,0 +1,64 @@
import { Test, TestingModule } from '@nestjs/testing';
import { RefreshTokenService } from '../refresh-token.service';
import { IRedisClient } from '../../../core/configs/cache/IRedisClient';
import { RedisClientToken } from '../../../core/configs/cache/redis-client.adapter.provider';
import { JwtService } from '@nestjs/jwt';
export const createMockRedisClient = () =>
({
get: jest.fn(),
set: jest.fn(),
del: jest.fn(),
keys: jest.fn(),
} as any);
export const createMockJwtService = () =>
({
sign: jest.fn(),
verify: jest.fn(),
decode: jest.fn(),
} as any);
/**
 * Bundle returned by createRefreshTokenServiceTestModule: the service under
 * test plus the mocks injected in its place of Redis and JwtService, so each
 * test can both drive the service and configure/inspect its dependencies.
 */
export interface RefreshTokenServiceTestContext {
  // Service instance resolved from the compiled NestJS testing module.
  service: RefreshTokenService;
  // Mock bound to RedisClientToken inside the testing module.
  mockRedisClient: jest.Mocked<IRedisClient>;
  // Mock bound to JwtService inside the testing module.
  mockJwtService: jest.Mocked<JwtService>;
}
export async function createRefreshTokenServiceTestModule(
redisClientMethods: Partial<IRedisClient> = {},
jwtServiceMethods: Partial<JwtService> = {},
): Promise<RefreshTokenServiceTestContext> {
const mockRedisClient = {
...createMockRedisClient(),
...redisClientMethods,
} as any;
const mockJwtService = {
...createMockJwtService(),
...jwtServiceMethods,
} as any;
const module: TestingModule = await Test.createTestingModule({
providers: [
RefreshTokenService,
{
provide: RedisClientToken,
useValue: mockRedisClient,
},
{
provide: JwtService,
useValue: mockJwtService,
},
],
}).compile();
const service = module.get<RefreshTokenService>(RefreshTokenService);
return {
service,
mockRedisClient,
mockJwtService,
};
}

View File

@@ -0,0 +1,392 @@
import { UnauthorizedException } from '@nestjs/common';
import { createRefreshTokenServiceTestModule } from './refresh-token.service.spec.helper';
import { RefreshTokenData } from '../refresh-token.service';
/**
 * Unit tests for RefreshTokenService: generation, validation, revocation
 * (single and all) and listing of active refresh tokens. Redis and
 * JwtService are replaced by jest mocks supplied through
 * createRefreshTokenServiceTestModule.
 */
describe('RefreshTokenService', () => {
  describe('generateRefreshToken', () => {
    let context: Awaited<
      ReturnType<typeof createRefreshTokenServiceTestModule>
    >;
    beforeEach(async () => {
      context = await createRefreshTokenServiceTestModule();
    });
    afterEach(() => {
      jest.clearAllMocks();
      // restoreAllMocks undoes the jest.spyOn(crypto, 'randomBytes') spies
      // created in the tests below; clearAllMocks alone resets call data but
      // leaves the spy installed, leaking it into other suites.
      jest.restoreAllMocks();
    });
    it('deve gerar refresh token com sucesso', async () => {
      const userId = 123;
      const sessionId = 'session-123';
      const mockToken = 'mock.refresh.token';
      const mockTokenId = 'token-id-123';
      // Pin the random token id so the signed payload is deterministic.
      jest.spyOn(require('crypto'), 'randomBytes').mockReturnValue({
        toString: () => mockTokenId,
      });
      context.mockJwtService.sign.mockReturnValue(mockToken);
      context.mockRedisClient.set.mockResolvedValue(undefined);
      context.mockRedisClient.keys.mockResolvedValue([]);
      const result = await context.service.generateRefreshToken(
        userId,
        sessionId,
      );
      expect(result).toBe(mockToken);
      expect(context.mockJwtService.sign).toHaveBeenCalledWith(
        {
          userId,
          tokenId: mockTokenId,
          sessionId,
          type: 'refresh',
        },
        { expiresIn: '7d' },
      );
      expect(context.mockRedisClient.set).toHaveBeenCalled();
    });
    it('deve gerar refresh token sem sessionId', async () => {
      const userId = 123;
      const mockToken = 'mock.refresh.token';
      const mockTokenId = 'token-id-123';
      jest.spyOn(require('crypto'), 'randomBytes').mockReturnValue({
        toString: () => mockTokenId,
      });
      context.mockJwtService.sign.mockReturnValue(mockToken);
      context.mockRedisClient.set.mockResolvedValue(undefined);
      context.mockRedisClient.keys.mockResolvedValue([]);
      const result = await context.service.generateRefreshToken(userId);
      expect(result).toBe(mockToken);
      expect(context.mockJwtService.sign).toHaveBeenCalledWith(
        {
          userId,
          tokenId: mockTokenId,
          sessionId: undefined,
          type: 'refresh',
        },
        { expiresIn: '7d' },
      );
    });
    it('deve limitar número de refresh tokens por usuário', async () => {
      const userId = 123;
      const mockToken = 'mock.refresh.token';
      const mockTokenId = 'token-id-123';
      jest.spyOn(require('crypto'), 'randomBytes').mockReturnValue({
        toString: () => mockTokenId,
      });
      context.mockJwtService.sign.mockReturnValue(mockToken);
      context.mockRedisClient.set.mockResolvedValue(undefined);
      // Six live tokens already stored — over the per-user cap, so the
      // service is expected to delete at least one before storing the new one.
      const existingTokens: RefreshTokenData[] = Array.from(
        { length: 6 },
        (_, i) => ({
          userId,
          tokenId: `token-${i}`,
          expiresAt: Date.now() + 1000000,
          createdAt: Date.now(),
        }),
      );
      context.mockRedisClient.keys.mockResolvedValue([
        'auth:refresh_tokens:123:token-0',
        'auth:refresh_tokens:123:token-1',
        'auth:refresh_tokens:123:token-2',
        'auth:refresh_tokens:123:token-3',
        'auth:refresh_tokens:123:token-4',
        'auth:refresh_tokens:123:token-5',
      ]);
      context.mockRedisClient.get
        .mockResolvedValueOnce(existingTokens[0])
        .mockResolvedValueOnce(existingTokens[1])
        .mockResolvedValueOnce(existingTokens[2])
        .mockResolvedValueOnce(existingTokens[3])
        .mockResolvedValueOnce(existingTokens[4])
        .mockResolvedValueOnce(existingTokens[5]);
      await context.service.generateRefreshToken(userId);
      expect(context.mockRedisClient.del).toHaveBeenCalled();
    });
  });
  describe('validateRefreshToken', () => {
    let context: Awaited<
      ReturnType<typeof createRefreshTokenServiceTestModule>
    >;
    beforeEach(async () => {
      context = await createRefreshTokenServiceTestModule();
    });
    afterEach(() => {
      jest.clearAllMocks();
    });
    it('deve validar refresh token com sucesso', async () => {
      const mockDecoded = {
        userId: 123,
        tokenId: 'token-id-123',
        sessionId: 'session-123',
        type: 'refresh',
      };
      const mockTokenData: RefreshTokenData = {
        userId: 123,
        tokenId: 'token-id-123',
        sessionId: 'session-123',
        expiresAt: Date.now() + 1000000,
        createdAt: Date.now(),
      };
      context.mockJwtService.verify.mockReturnValue(mockDecoded);
      context.mockRedisClient.get.mockResolvedValue(mockTokenData);
      const result = await context.service.validateRefreshToken(
        'valid.refresh.token',
      );
      expect(result.id).toBe(123);
      expect((result as any).tokenId).toBe('token-id-123');
      expect(result.sessionId).toBe('session-123');
    });
    it('deve lançar exceção quando token não é do tipo refresh', async () => {
      const mockDecoded = {
        userId: 123,
        tokenId: 'token-id-123',
        type: 'access',
      };
      context.mockJwtService.verify.mockReturnValue(mockDecoded);
      await expect(
        context.service.validateRefreshToken('invalid.token'),
      ).rejects.toThrow(UnauthorizedException);
    });
    it('deve lançar exceção quando token não existe no Redis', async () => {
      const mockDecoded = {
        userId: 123,
        tokenId: 'token-id-123',
        sessionId: 'session-123',
        type: 'refresh',
      };
      context.mockJwtService.verify.mockReturnValue(mockDecoded);
      context.mockRedisClient.get.mockResolvedValue(null);
      await expect(
        context.service.validateRefreshToken('expired.token'),
      ).rejects.toThrow(UnauthorizedException);
    });
    it('deve lançar exceção quando token está expirado', async () => {
      const mockDecoded = {
        userId: 123,
        tokenId: 'token-id-123',
        sessionId: 'session-123',
        type: 'refresh',
      };
      const mockTokenData: RefreshTokenData = {
        userId: 123,
        tokenId: 'token-id-123',
        sessionId: 'session-123',
        expiresAt: Date.now() - 1000,
        createdAt: Date.now() - 1000000,
      };
      context.mockJwtService.verify.mockReturnValue(mockDecoded);
      context.mockRedisClient.get.mockResolvedValue(mockTokenData);
      context.mockRedisClient.del.mockResolvedValue(undefined);
      await expect(
        context.service.validateRefreshToken('expired.token'),
      ).rejects.toThrow(UnauthorizedException);
      // An expired token is expected to be purged from Redis as a side effect.
      expect(context.mockRedisClient.del).toHaveBeenCalled();
    });
    it('deve lançar exceção quando verificação do JWT falha', async () => {
      context.mockJwtService.verify.mockImplementation(() => {
        throw new Error('Token inválido');
      });
      await expect(
        context.service.validateRefreshToken('invalid.token'),
      ).rejects.toThrow(UnauthorizedException);
    });
  });
  describe('revokeRefreshToken', () => {
    let context: Awaited<
      ReturnType<typeof createRefreshTokenServiceTestModule>
    >;
    beforeEach(async () => {
      context = await createRefreshTokenServiceTestModule();
    });
    afterEach(() => {
      jest.clearAllMocks();
    });
    it('deve revogar refresh token com sucesso', async () => {
      const userId = 123;
      const tokenId = 'token-id-123';
      context.mockRedisClient.del.mockResolvedValue(undefined);
      await context.service.revokeRefreshToken(userId, tokenId);
      expect(context.mockRedisClient.del).toHaveBeenCalledWith(
        `auth:refresh_tokens:${userId}:${tokenId}`,
      );
    });
  });
  describe('revokeAllRefreshTokens', () => {
    let context: Awaited<
      ReturnType<typeof createRefreshTokenServiceTestModule>
    >;
    beforeEach(async () => {
      context = await createRefreshTokenServiceTestModule();
    });
    afterEach(() => {
      jest.clearAllMocks();
    });
    it('deve revogar todos os refresh tokens do usuário', async () => {
      const userId = 123;
      const mockKeys = [
        'auth:refresh_tokens:123:token-1',
        'auth:refresh_tokens:123:token-2',
        'auth:refresh_tokens:123:token-3',
      ];
      context.mockRedisClient.keys.mockResolvedValue(mockKeys);
      context.mockRedisClient.del.mockResolvedValue(undefined);
      await context.service.revokeAllRefreshTokens(userId);
      expect(context.mockRedisClient.keys).toHaveBeenCalledWith(
        `auth:refresh_tokens:${userId}:*`,
      );
      expect(context.mockRedisClient.del).toHaveBeenCalledWith(...mockKeys);
    });
    it('deve retornar sem erro quando não há tokens para revogar', async () => {
      const userId = 123;
      context.mockRedisClient.keys.mockResolvedValue([]);
      await context.service.revokeAllRefreshTokens(userId);
      expect(context.mockRedisClient.del).not.toHaveBeenCalled();
    });
  });
  describe('getActiveRefreshTokens', () => {
    let context: Awaited<
      ReturnType<typeof createRefreshTokenServiceTestModule>
    >;
    beforeEach(async () => {
      context = await createRefreshTokenServiceTestModule();
    });
    afterEach(() => {
      jest.clearAllMocks();
    });
    it('deve retornar tokens ativos ordenados por data de criação', async () => {
      const userId = 123;
      const mockKeys = [
        'auth:refresh_tokens:123:token-1',
        'auth:refresh_tokens:123:token-2',
      ];
      const now = Date.now();
      const token1: RefreshTokenData = {
        userId: 123,
        tokenId: 'token-1',
        expiresAt: now + 1000000,
        createdAt: now - 2000,
      };
      const token2: RefreshTokenData = {
        userId: 123,
        tokenId: 'token-2',
        expiresAt: now + 1000000,
        createdAt: now - 1000,
      };
      context.mockRedisClient.keys.mockResolvedValue(mockKeys);
      context.mockRedisClient.get
        .mockResolvedValueOnce(token1)
        .mockResolvedValueOnce(token2);
      const result = await context.service.getActiveRefreshTokens(userId);
      expect(result).toHaveLength(2);
      // Most recently created token is expected first.
      expect(result[0].tokenId).toBe('token-2');
      expect(result[1].tokenId).toBe('token-1');
    });
    it('deve filtrar tokens expirados', async () => {
      const userId = 123;
      const mockKeys = [
        'auth:refresh_tokens:123:token-1',
        'auth:refresh_tokens:123:token-2',
      ];
      const now = Date.now();
      const token1: RefreshTokenData = {
        userId: 123,
        tokenId: 'token-1',
        expiresAt: now - 1000,
        createdAt: now - 2000,
      };
      const token2: RefreshTokenData = {
        userId: 123,
        tokenId: 'token-2',
        expiresAt: now + 1000000,
        createdAt: now - 1000,
      };
      context.mockRedisClient.keys.mockResolvedValue(mockKeys);
      context.mockRedisClient.get
        .mockResolvedValueOnce(token1)
        .mockResolvedValueOnce(token2);
      const result = await context.service.getActiveRefreshTokens(userId);
      expect(result).toHaveLength(1);
      expect(result[0].tokenId).toBe('token-2');
    });
    it('deve retornar array vazio quando não há tokens', async () => {
      const userId = 123;
      context.mockRedisClient.keys.mockResolvedValue([]);
      const result = await context.service.getActiveRefreshTokens(userId);
      expect(result).toHaveLength(0);
    });
  });
});

View File

@@ -0,0 +1,62 @@
import { Test, TestingModule } from '@nestjs/testing';
import { TokenBlacklistService } from '../token-blacklist.service';
import { IRedisClient } from '../../../core/configs/cache/IRedisClient';
import { RedisClientToken } from '../../../core/configs/cache/redis-client.adapter.provider';
import { JwtService } from '@nestjs/jwt';
export const createMockRedisClient = () =>
({
get: jest.fn(),
set: jest.fn(),
del: jest.fn(),
keys: jest.fn(),
} as any);
export const createMockJwtService = () =>
({
decode: jest.fn(),
} as any);
/**
 * Bundle returned by createTokenBlacklistServiceTestModule: the service under
 * test plus the mocks injected in place of Redis and JwtService, so each test
 * can drive the service and configure/inspect its dependencies.
 */
export interface TokenBlacklistServiceTestContext {
  // Service instance resolved from the compiled NestJS testing module.
  service: TokenBlacklistService;
  // Mock bound to RedisClientToken inside the testing module.
  mockRedisClient: jest.Mocked<IRedisClient>;
  // Mock bound to JwtService inside the testing module.
  mockJwtService: jest.Mocked<JwtService>;
}
export async function createTokenBlacklistServiceTestModule(
redisClientMethods: Partial<IRedisClient> = {},
jwtServiceMethods: Partial<JwtService> = {},
): Promise<TokenBlacklistServiceTestContext> {
const mockRedisClient = {
...createMockRedisClient(),
...redisClientMethods,
} as any;
const mockJwtService = {
...createMockJwtService(),
...jwtServiceMethods,
} as any;
const module: TestingModule = await Test.createTestingModule({
providers: [
TokenBlacklistService,
{
provide: RedisClientToken,
useValue: mockRedisClient,
},
{
provide: JwtService,
useValue: mockJwtService,
},
],
}).compile();
const service = module.get<TokenBlacklistService>(TokenBlacklistService);
return {
service,
mockRedisClient,
mockJwtService,
};
}

View File

@@ -0,0 +1,257 @@
import { createTokenBlacklistServiceTestModule } from './token-blacklist.service.spec.helper';
import { JwtPayload } from '../../models/jwt-payload.model';
describe('TokenBlacklistService', () => {
  // Shared context: a fresh Nest testing module with mocked Redis client and
  // JwtService, rebuilt before every test so cases cannot leak state.
  // Hoisted here once instead of being duplicated in each nested describe.
  let context: Awaited<
    ReturnType<typeof createTokenBlacklistServiceTestModule>
  >;

  beforeEach(async () => {
    context = await createTokenBlacklistServiceTestModule();
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  // Builds the decoded payload returned by the mocked JwtService.decode().
  // Pass `exp` (unix seconds) only in tests that exercise TTL derivation;
  // other tests omit it, matching the original per-test literals.
  const buildPayload = (exp?: number): JwtPayload => ({
    id: 123,
    sellerId: 1,
    storeId: '1',
    username: 'user',
    email: 'user@example.com',
    ...(exp !== undefined ? { exp } : {}),
  });

  describe('addToBlacklist', () => {
    it('deve adicionar token à blacklist com sucesso', async () => {
      const mockToken = 'valid.jwt.token';
      context.mockJwtService.decode.mockReturnValue(
        buildPayload(Math.floor(Date.now() / 1000) + 3600),
      );
      context.mockRedisClient.set.mockResolvedValue(undefined);

      await context.service.addToBlacklist(mockToken);

      expect(context.mockJwtService.decode).toHaveBeenCalledWith(mockToken);
      expect(context.mockRedisClient.set).toHaveBeenCalled();
    });

    it('deve adicionar token à blacklist com TTL customizado', async () => {
      const mockToken = 'valid.jwt.token';
      const customTTL = 7200;
      context.mockJwtService.decode.mockReturnValue(
        buildPayload(Math.floor(Date.now() / 1000) + 3600),
      );
      context.mockRedisClient.set.mockResolvedValue(undefined);

      await context.service.addToBlacklist(mockToken, customTTL);

      // The explicit TTL must take precedence over the one derived from exp.
      expect(context.mockRedisClient.set).toHaveBeenCalledWith(
        expect.any(String),
        'blacklisted',
        customTTL,
      );
    });

    it('deve calcular TTL automaticamente quando não informado', async () => {
      const mockToken = 'valid.jwt.token';
      const now = Math.floor(Date.now() / 1000);
      context.mockJwtService.decode.mockReturnValue(buildPayload(now + 3600));
      context.mockRedisClient.set.mockResolvedValue(undefined);

      await context.service.addToBlacklist(mockToken);

      // Stronger than expect.any(Number): the derived TTL must be close to
      // exp - now (small margin allowed for test execution time). The old
      // assertion would also have accepted 0 or a negative TTL.
      const [, value, ttl] = context.mockRedisClient.set.mock.calls[0];
      expect(value).toBe('blacklisted');
      expect(ttl).toBeGreaterThan(3590);
      expect(ttl).toBeLessThanOrEqual(3600);
    });

    it('deve lançar erro quando token é inválido', async () => {
      const mockToken = 'invalid.token';
      context.mockJwtService.decode.mockReturnValue(null);

      await expect(
        context.service.addToBlacklist(mockToken),
      ).rejects.toThrow('Token inválido');
    });

    it('deve lançar erro quando decode falha', async () => {
      const mockToken = 'invalid.token';
      context.mockJwtService.decode.mockImplementation(() => {
        throw new Error('Token malformado');
      });

      await expect(
        context.service.addToBlacklist(mockToken),
      ).rejects.toThrow('Erro ao adicionar token à blacklist');
    });
  });

  describe('isBlacklisted', () => {
    it('deve retornar true quando token está na blacklist', async () => {
      const mockToken = 'blacklisted.token';
      context.mockJwtService.decode.mockReturnValue(buildPayload());
      context.mockRedisClient.get.mockResolvedValue('blacklisted');

      const result = await context.service.isBlacklisted(mockToken);

      expect(result).toBe(true);
      expect(context.mockRedisClient.get).toHaveBeenCalled();
    });

    it('deve retornar false quando token não está na blacklist', async () => {
      const mockToken = 'valid.token';
      context.mockJwtService.decode.mockReturnValue(buildPayload());
      context.mockRedisClient.get.mockResolvedValue(null);

      const result = await context.service.isBlacklisted(mockToken);

      expect(result).toBe(false);
    });

    it('deve retornar false quando ocorre erro', async () => {
      // A Redis failure resolves to false (per this expectation the service
      // fails open rather than rejecting).
      const mockToken = 'error.token';
      context.mockJwtService.decode.mockReturnValue(buildPayload());
      context.mockRedisClient.get.mockRejectedValue(new Error('Redis error'));

      const result = await context.service.isBlacklisted(mockToken);

      expect(result).toBe(false);
    });
  });

  describe('removeFromBlacklist', () => {
    it('deve remover token da blacklist com sucesso', async () => {
      const mockToken = 'token.to.remove';
      context.mockJwtService.decode.mockReturnValue(buildPayload());
      context.mockRedisClient.del.mockResolvedValue(undefined);

      await context.service.removeFromBlacklist(mockToken);

      expect(context.mockRedisClient.del).toHaveBeenCalled();
    });
  });

  describe('clearUserBlacklist', () => {
    it('deve limpar todos os tokens do usuário da blacklist', async () => {
      const userId = 123;
      const mockKeys = [
        'auth:blacklist:123:hash1',
        'auth:blacklist:123:hash2',
        'auth:blacklist:123:hash3',
      ];
      context.mockRedisClient.keys.mockResolvedValue(mockKeys);
      context.mockRedisClient.del.mockResolvedValue(undefined);

      await context.service.clearUserBlacklist(userId);

      expect(context.mockRedisClient.keys).toHaveBeenCalledWith(
        `auth:blacklist:${userId}:*`,
      );
      // del receives the keys spread as variadic arguments, not an array.
      expect(context.mockRedisClient.del).toHaveBeenCalledWith(...mockKeys);
    });

    it('deve retornar sem erro quando não há tokens para limpar', async () => {
      const userId = 123;
      context.mockRedisClient.keys.mockResolvedValue([]);

      await context.service.clearUserBlacklist(userId);

      expect(context.mockRedisClient.del).not.toHaveBeenCalled();
    });
  });
});

View File

@@ -31,14 +31,14 @@ export class LoginAuditService {
private readonly LOG_PREFIX = 'login_audit'; private readonly LOG_PREFIX = 'login_audit';
private readonly LOG_EXPIRY = 30 * 24 * 60 * 60; private readonly LOG_EXPIRY = 30 * 24 * 60 * 60;
constructor( constructor(@Inject('REDIS_CLIENT') private readonly redis: Redis) {}
@Inject('REDIS_CLIENT') private readonly redis: Redis,
) {}
async logLoginAttempt(log: Omit<LoginAuditLog, 'id' | 'timestamp'>): Promise<void> { async logLoginAttempt(
log: Omit<LoginAuditLog, 'id' | 'timestamp'>,
): Promise<void> {
const logId = this.generateLogId(); const logId = this.generateLogId();
const timestamp = DateUtil.now(); const timestamp = DateUtil.now();
const auditLog: LoginAuditLog = { const auditLog: LoginAuditLog = {
...log, ...log,
id: logId, id: logId,
@@ -69,24 +69,26 @@ export class LoginAuditService {
await this.redis.expire(dateLogsKey, this.LOG_EXPIRY); await this.redis.expire(dateLogsKey, this.LOG_EXPIRY);
} }
async getLoginLogs(filters: LoginAuditFilters = {}): Promise<LoginAuditLog[]> { async getLoginLogs(
filters: LoginAuditFilters = {},
): Promise<LoginAuditLog[]> {
const logIds = await this.getLogIds(filters); const logIds = await this.getLogIds(filters);
const logs: LoginAuditLog[] = []; const logs: LoginAuditLog[] = [];
for (const logId of logIds) { for (const logId of logIds) {
const logKey = this.buildLogKey(logId); const logKey = this.buildLogKey(logId);
const logData = await this.redis.get(logKey); const logData = await this.redis.get(logKey);
if (!logData) { if (!logData) {
continue; continue;
} }
const log: LoginAuditLog = JSON.parse(logData as string); const log: LoginAuditLog = JSON.parse(logData as string);
if (typeof log.timestamp === 'string') { if (typeof log.timestamp === 'string') {
log.timestamp = new Date(log.timestamp); log.timestamp = new Date(log.timestamp);
} }
if (!this.matchesFilters(log, filters)) { if (!this.matchesFilters(log, filters)) {
continue; continue;
} }
@@ -98,21 +100,29 @@ export class LoginAuditService {
const offset = filters.offset || 0; const offset = filters.offset || 0;
const limit = filters.limit || 100; const limit = filters.limit || 100;
return logs.slice(offset, offset + limit); return logs.slice(offset, offset + limit);
} }
async getLoginStats(userId?: number, days: number = 7): Promise<{ async getLoginStats(
userId?: number,
days: number = 7,
): Promise<{
totalAttempts: number; totalAttempts: number;
successfulLogins: number; successfulLogins: number;
failedLogins: number; failedLogins: number;
uniqueIps: number; uniqueIps: number;
topIps: Array<{ ip: string; count: number }>; topIps: Array<{ ip: string; count: number }>;
dailyStats: Array<{ date: string; attempts: number; successes: number; failures: number }>; dailyStats: Array<{
date: string;
attempts: number;
successes: number;
failures: number;
}>;
}> { }> {
const endDate = DateUtil.now(); const endDate = DateUtil.now();
const startDate = new Date(endDate.getTime() - days * 24 * 60 * 60 * 1000); const startDate = new Date(endDate.getTime() - days * 24 * 60 * 60 * 1000);
const filters: LoginAuditFilters = { const filters: LoginAuditFilters = {
startDate, startDate,
endDate, endDate,
@@ -124,38 +134,50 @@ export class LoginAuditService {
} }
const logs = await this.getLoginLogs(filters); const logs = await this.getLoginLogs(filters);
const stats = { const stats = {
totalAttempts: logs.length, totalAttempts: logs.length,
successfulLogins: logs.filter(log => log.success).length, successfulLogins: logs.filter((log) => log.success).length,
failedLogins: logs.filter(log => !log.success).length, failedLogins: logs.filter((log) => !log.success).length,
uniqueIps: new Set(logs.map(log => log.ipAddress)).size, uniqueIps: new Set(logs.map((log) => log.ipAddress)).size,
topIps: [] as Array<{ ip: string; count: number }>, topIps: [] as Array<{ ip: string; count: number }>,
dailyStats: [] as Array<{ date: string; attempts: number; successes: number; failures: number }>, dailyStats: [] as Array<{
date: string;
attempts: number;
successes: number;
failures: number;
}>,
}; };
const ipCounts = new Map<string, number>(); const ipCounts = new Map<string, number>();
logs.forEach(log => { logs.forEach((log) => {
ipCounts.set(log.ipAddress, (ipCounts.get(log.ipAddress) || 0) + 1); ipCounts.set(log.ipAddress, (ipCounts.get(log.ipAddress) || 0) + 1);
}); });
stats.topIps = Array.from(ipCounts.entries()) stats.topIps = Array.from(ipCounts.entries())
.map(([ip, count]) => ({ ip, count })) .map(([ip, count]) => ({ ip, count }))
.sort((a, b) => b.count - a.count) .sort((a, b) => b.count - a.count)
.slice(0, 10); .slice(0, 10);
const dailyCounts = new Map<string, { attempts: number; successes: number; failures: number }>(); const dailyCounts = new Map<
logs.forEach(log => { string,
{ attempts: number; successes: number; failures: number }
>();
logs.forEach((log) => {
const date = DateUtil.toBrazilString(log.timestamp, 'yyyy-MM-dd'); const date = DateUtil.toBrazilString(log.timestamp, 'yyyy-MM-dd');
const dayStats = dailyCounts.get(date) || { attempts: 0, successes: 0, failures: 0 }; const dayStats = dailyCounts.get(date) || {
attempts: 0,
successes: 0,
failures: 0,
};
dayStats.attempts++; dayStats.attempts++;
if (log.success) { if (log.success) {
dayStats.successes++; dayStats.successes++;
dailyCounts.set(date, dayStats); dailyCounts.set(date, dayStats);
return; return;
} }
dayStats.failures++; dayStats.failures++;
dailyCounts.set(date, dayStats); dailyCounts.set(date, dayStats);
}); });
@@ -168,9 +190,11 @@ export class LoginAuditService {
} }
async cleanupOldLogs(): Promise<void> { async cleanupOldLogs(): Promise<void> {
const cutoffDate = new Date(DateUtil.nowTimestamp() - 30 * 24 * 60 * 60 * 1000); const cutoffDate = new Date(
DateUtil.nowTimestamp() - 30 * 24 * 60 * 60 * 1000,
);
const cutoffDateStr = DateUtil.toBrazilString(cutoffDate, 'yyyy-MM-dd'); const cutoffDateStr = DateUtil.toBrazilString(cutoffDate, 'yyyy-MM-dd');
const oldDates = this.getDateRange(new Date('2020-01-01'), cutoffDate); const oldDates = this.getDateRange(new Date('2020-01-01'), cutoffDate);
for (const date of oldDates) { for (const date of oldDates) {
const dateLogsKey = this.buildDateLogsKey(date); const dateLogsKey = this.buildDateLogsKey(date);
@@ -190,18 +214,20 @@ export class LoginAuditService {
} }
if (filters.startDate || filters.endDate) { if (filters.startDate || filters.endDate) {
const startDate = filters.startDate || new Date(DateUtil.nowTimestamp() - 7 * 24 * 60 * 60 * 1000); const startDate =
filters.startDate ||
new Date(DateUtil.nowTimestamp() - 7 * 24 * 60 * 60 * 1000);
const endDate = filters.endDate || DateUtil.now(); const endDate = filters.endDate || DateUtil.now();
const dates = this.getDateRange(startDate, endDate); const dates = this.getDateRange(startDate, endDate);
const logIds: string[] = []; const logIds: string[] = [];
for (const date of dates) { for (const date of dates) {
const dateLogsKey = this.buildDateLogsKey(date); const dateLogsKey = this.buildDateLogsKey(date);
const dateLogIds = await this.redis.lrange(dateLogsKey, 0, -1); const dateLogIds = await this.redis.lrange(dateLogsKey, 0, -1);
logIds.push(...dateLogIds); logIds.push(...dateLogIds);
} }
return logIds; return logIds;
} }
@@ -210,7 +236,9 @@ export class LoginAuditService {
} }
private generateLogId(): string { private generateLogId(): string {
return `${DateUtil.nowTimestamp()}_${Math.random().toString(36).substr(2, 9)}`; return `${DateUtil.nowTimestamp()}_${Math.random()
.toString(36)
.substr(2, 9)}`;
} }
private buildLogKey(logId: string): string { private buildLogKey(logId: string): string {
@@ -233,11 +261,17 @@ export class LoginAuditService {
return `${this.LOG_PREFIX}:date:${date}`; return `${this.LOG_PREFIX}:date:${date}`;
} }
private matchesFilters(log: LoginAuditLog, filters: LoginAuditFilters): boolean { private matchesFilters(
if (filters.username && !log.username.toLowerCase().includes(filters.username.toLowerCase())) { log: LoginAuditLog,
filters: LoginAuditFilters,
): boolean {
if (
filters.username &&
!log.username.toLowerCase().includes(filters.username.toLowerCase())
) {
return false; return false;
} }
if (filters.success !== undefined && log.success !== filters.success) { if (filters.success !== undefined && log.success !== filters.success) {
return false; return false;
} }
@@ -256,12 +290,12 @@ export class LoginAuditService {
private getDateRange(startDate: Date, endDate: Date): string[] { private getDateRange(startDate: Date, endDate: Date): string[] {
const dates: string[] = []; const dates: string[] = [];
const currentDate = new Date(startDate); const currentDate = new Date(startDate);
while (currentDate <= endDate) { while (currentDate <= endDate) {
dates.push(DateUtil.toBrazilString(currentDate, 'yyyy-MM-dd')); dates.push(DateUtil.toBrazilString(currentDate, 'yyyy-MM-dd'));
currentDate.setDate(currentDate.getDate() + 1); currentDate.setDate(currentDate.getDate() + 1);
} }
return dates; return dates;
} }
} }

View File

@@ -16,11 +16,12 @@ export class RateLimitingService {
blockDurationMs: 1 * 60 * 1000, blockDurationMs: 1 * 60 * 1000,
}; };
constructor( constructor(@Inject(RedisClientToken) private readonly redis: IRedisClient) {}
@Inject(RedisClientToken) private readonly redis: IRedisClient,
) {}
async isAllowed(ip: string, config?: Partial<RateLimitConfig>): Promise<boolean> { async isAllowed(
ip: string,
config?: Partial<RateLimitConfig>,
): Promise<boolean> {
const finalConfig = { ...this.defaultConfig, ...config }; const finalConfig = { ...this.defaultConfig, ...config };
const key = this.buildAttemptKey(ip); const key = this.buildAttemptKey(ip);
const blockKey = this.buildBlockKey(ip); const blockKey = this.buildBlockKey(ip);
@@ -51,21 +52,25 @@ export class RateLimitingService {
return {attempts, 0} return {attempts, 0}
`; `;
const result = await this.redis.eval( const result = (await this.redis.eval(
luaScript, luaScript,
2, 2,
key, key,
blockKey, blockKey,
finalConfig.maxAttempts, finalConfig.maxAttempts,
finalConfig.windowMs, finalConfig.windowMs,
finalConfig.blockDurationMs finalConfig.blockDurationMs,
) as [number, number]; )) as [number, number];
const [attempts, isBlockedResult] = result; const [attempts, isBlockedResult] = result;
return isBlockedResult === 0; return isBlockedResult === 0;
} }
async recordAttempt(ip: string, success: boolean, config?: Partial<RateLimitConfig>): Promise<void> { async recordAttempt(
ip: string,
success: boolean,
config?: Partial<RateLimitConfig>,
): Promise<void> {
const finalConfig = { ...this.defaultConfig, ...config }; const finalConfig = { ...this.defaultConfig, ...config };
const key = this.buildAttemptKey(ip); const key = this.buildAttemptKey(ip);
const blockKey = this.buildBlockKey(ip); const blockKey = this.buildBlockKey(ip);
@@ -98,7 +103,7 @@ export class RateLimitingService {
async clearAttempts(ip: string): Promise<void> { async clearAttempts(ip: string): Promise<void> {
const key = this.buildAttemptKey(ip); const key = this.buildAttemptKey(ip);
const blockKey = this.buildBlockKey(ip); const blockKey = this.buildBlockKey(ip);
await this.redis.del(key); await this.redis.del(key);
await this.redis.del(blockKey); await this.redis.del(blockKey);
} }

View File

@@ -24,18 +24,21 @@ export class RefreshTokenService {
private readonly jwtService: JwtService, private readonly jwtService: JwtService,
) {} ) {}
async generateRefreshToken(userId: number, sessionId?: string): Promise<string> { async generateRefreshToken(
userId: number,
sessionId?: string,
): Promise<string> {
const tokenId = randomBytes(32).toString('hex'); const tokenId = randomBytes(32).toString('hex');
const refreshToken = this.jwtService.sign( const refreshToken = this.jwtService.sign(
{ userId, tokenId, sessionId, type: 'refresh' }, { userId, tokenId, sessionId, type: 'refresh' },
{ expiresIn: '7d' } { expiresIn: '7d' },
); );
const tokenData: RefreshTokenData = { const tokenData: RefreshTokenData = {
userId, userId,
tokenId, tokenId,
sessionId, sessionId,
expiresAt: DateUtil.nowTimestamp() + (this.REFRESH_TOKEN_TTL * 1000), expiresAt: DateUtil.nowTimestamp() + this.REFRESH_TOKEN_TTL * 1000,
createdAt: DateUtil.nowTimestamp(), createdAt: DateUtil.nowTimestamp(),
}; };
@@ -50,7 +53,7 @@ export class RefreshTokenService {
async validateRefreshToken(refreshToken: string): Promise<JwtPayload> { async validateRefreshToken(refreshToken: string): Promise<JwtPayload> {
try { try {
const decoded = this.jwtService.verify(refreshToken) as any; const decoded = this.jwtService.verify(refreshToken) as any;
if (decoded.type !== 'refresh') { if (decoded.type !== 'refresh') {
throw new UnauthorizedException('Token inválido'); throw new UnauthorizedException('Token inválido');
} }
@@ -68,14 +71,14 @@ export class RefreshTokenService {
throw new UnauthorizedException('Refresh token expirado'); throw new UnauthorizedException('Refresh token expirado');
} }
return { return {
id: userId, id: userId,
sellerId: 0, sellerId: 0,
storeId: '', storeId: '',
username: '', username: '',
email: '', email: '',
sessionId: sessionId || tokenData.sessionId, sessionId: sessionId || tokenData.sessionId,
tokenId tokenId,
} as JwtPayload; } as JwtPayload;
} catch (error) { } catch (error) {
throw new UnauthorizedException('Refresh token inválido'); throw new UnauthorizedException('Refresh token inválido');
@@ -90,7 +93,7 @@ export class RefreshTokenService {
async revokeAllRefreshTokens(userId: number): Promise<void> { async revokeAllRefreshTokens(userId: number): Promise<void> {
const pattern = this.buildRefreshTokenPattern(userId); const pattern = this.buildRefreshTokenPattern(userId);
const keys = await this.redis.keys(pattern); const keys = await this.redis.keys(pattern);
if (keys.length > 0) { if (keys.length > 0) {
await this.redis.del(...keys); await this.redis.del(...keys);
} }
@@ -99,9 +102,9 @@ export class RefreshTokenService {
async getActiveRefreshTokens(userId: number): Promise<RefreshTokenData[]> { async getActiveRefreshTokens(userId: number): Promise<RefreshTokenData[]> {
const pattern = this.buildRefreshTokenPattern(userId); const pattern = this.buildRefreshTokenPattern(userId);
const keys = await this.redis.keys(pattern); const keys = await this.redis.keys(pattern);
const tokens: RefreshTokenData[] = []; const tokens: RefreshTokenData[] = [];
for (const key of keys) { for (const key of keys) {
const tokenData = await this.redis.get<RefreshTokenData>(key); const tokenData = await this.redis.get<RefreshTokenData>(key);
if (tokenData && tokenData.expiresAt > DateUtil.nowTimestamp()) { if (tokenData && tokenData.expiresAt > DateUtil.nowTimestamp()) {
@@ -114,11 +117,11 @@ export class RefreshTokenService {
private async limitRefreshTokensPerUser(userId: number): Promise<void> { private async limitRefreshTokensPerUser(userId: number): Promise<void> {
const activeTokens = await this.getActiveRefreshTokens(userId); const activeTokens = await this.getActiveRefreshTokens(userId);
if (activeTokens.length > this.MAX_REFRESH_TOKENS_PER_USER) { if (activeTokens.length > this.MAX_REFRESH_TOKENS_PER_USER) {
const tokensToRemove = activeTokens const tokensToRemove = activeTokens
.slice(this.MAX_REFRESH_TOKENS_PER_USER) .slice(this.MAX_REFRESH_TOKENS_PER_USER)
.map(token => token.tokenId); .map((token) => token.tokenId);
for (const tokenId of tokensToRemove) { for (const tokenId of tokensToRemove) {
await this.revokeRefreshToken(userId, tokenId); await this.revokeRefreshToken(userId, tokenId);

View File

@@ -19,11 +19,13 @@ export class SessionManagementService {
private readonly SESSION_TTL = 8 * 60 * 60; private readonly SESSION_TTL = 8 * 60 * 60;
private readonly MAX_SESSIONS_PER_USER = 1; private readonly MAX_SESSIONS_PER_USER = 1;
constructor( constructor(@Inject(RedisClientToken) private readonly redis: IRedisClient) {}
@Inject(RedisClientToken) private readonly redis: IRedisClient,
) {}
async createSession(userId: number, ipAddress: string, userAgent: string): Promise<SessionData> { async createSession(
userId: number,
ipAddress: string,
userAgent: string,
): Promise<SessionData> {
const sessionId = randomBytes(16).toString('hex'); const sessionId = randomBytes(16).toString('hex');
const now = DateUtil.nowTimestamp(); const now = DateUtil.nowTimestamp();
@@ -45,7 +47,10 @@ export class SessionManagementService {
return sessionData; return sessionData;
} }
async updateSessionActivity(userId: number, sessionId: string): Promise<void> { async updateSessionActivity(
userId: number,
sessionId: string,
): Promise<void> {
const key = this.buildSessionKey(userId, sessionId); const key = this.buildSessionKey(userId, sessionId);
const sessionData = await this.redis.get<SessionData>(key); const sessionData = await this.redis.get<SessionData>(key);
@@ -55,12 +60,15 @@ export class SessionManagementService {
} }
} }
async getActiveSessions(userId: number, currentSessionId?: string): Promise<SessionData[]> { async getActiveSessions(
userId: number,
currentSessionId?: string,
): Promise<SessionData[]> {
const pattern = this.buildSessionPattern(userId); const pattern = this.buildSessionPattern(userId);
const keys = await this.redis.keys(pattern); const keys = await this.redis.keys(pattern);
const sessions: SessionData[] = []; const sessions: SessionData[] = [];
for (const key of keys) { for (const key of keys) {
const sessionData = await this.redis.get<SessionData>(key); const sessionData = await this.redis.get<SessionData>(key);
if (sessionData && sessionData.isActive) { if (sessionData && sessionData.isActive) {
@@ -89,7 +97,7 @@ export class SessionManagementService {
async terminateAllSessions(userId: number): Promise<void> { async terminateAllSessions(userId: number): Promise<void> {
const pattern = this.buildSessionPattern(userId); const pattern = this.buildSessionPattern(userId);
const keys = await this.redis.keys(pattern); const keys = await this.redis.keys(pattern);
for (const key of keys) { for (const key of keys) {
const sessionData = await this.redis.get<SessionData>(key); const sessionData = await this.redis.get<SessionData>(key);
if (sessionData) { if (sessionData) {
@@ -99,10 +107,13 @@ export class SessionManagementService {
} }
} }
async terminateOtherSessions(userId: number, currentSessionId: string): Promise<void> { async terminateOtherSessions(
userId: number,
currentSessionId: string,
): Promise<void> {
const pattern = this.buildSessionPattern(userId); const pattern = this.buildSessionPattern(userId);
const keys = await this.redis.keys(pattern); const keys = await this.redis.keys(pattern);
for (const key of keys) { for (const key of keys) {
const sessionData = await this.redis.get<SessionData>(key); const sessionData = await this.redis.get<SessionData>(key);
if (sessionData && sessionData.sessionId !== currentSessionId) { if (sessionData && sessionData.sessionId !== currentSessionId) {
@@ -115,7 +126,7 @@ export class SessionManagementService {
async isSessionActive(userId: number, sessionId: string): Promise<boolean> { async isSessionActive(userId: number, sessionId: string): Promise<boolean> {
const key = this.buildSessionKey(userId, sessionId); const key = this.buildSessionKey(userId, sessionId);
const sessionData = await this.redis.get<SessionData>(key); const sessionData = await this.redis.get<SessionData>(key);
return sessionData ? sessionData.isActive : false; return sessionData ? sessionData.isActive : false;
} }
@@ -126,11 +137,11 @@ export class SessionManagementService {
private async limitSessionsPerUser(userId: number): Promise<void> { private async limitSessionsPerUser(userId: number): Promise<void> {
const activeSessions = await this.getActiveSessions(userId); const activeSessions = await this.getActiveSessions(userId);
if (activeSessions.length > this.MAX_SESSIONS_PER_USER) { if (activeSessions.length > this.MAX_SESSIONS_PER_USER) {
const sessionsToRemove = activeSessions const sessionsToRemove = activeSessions
.slice(this.MAX_SESSIONS_PER_USER) .slice(this.MAX_SESSIONS_PER_USER)
.map(session => session.sessionId); .map((session) => session.sessionId);
for (const sessionId of sessionsToRemove) { for (const sessionId of sessionsToRemove) {
await this.terminateSession(userId, sessionId); await this.terminateSession(userId, sessionId);

View File

@@ -20,7 +20,7 @@ export class TokenBlacklistService {
const blacklistKey = this.buildBlacklistKey(token); const blacklistKey = this.buildBlacklistKey(token);
const ttl = expiresIn || this.calculateTokenTTL(decoded); const ttl = expiresIn || this.calculateTokenTTL(decoded);
await this.redis.set(blacklistKey, 'blacklisted', ttl); await this.redis.set(blacklistKey, 'blacklisted', ttl);
} catch (error) { } catch (error) {
throw new Error(`Erro ao adicionar token à blacklist: ${error.message}`); throw new Error(`Erro ao adicionar token à blacklist: ${error.message}`);
@@ -45,7 +45,7 @@ export class TokenBlacklistService {
async clearUserBlacklist(userId: number): Promise<void> { async clearUserBlacklist(userId: number): Promise<void> {
const pattern = `auth:blacklist:${userId}:*`; const pattern = `auth:blacklist:${userId}:*`;
const keys = await this.redis.keys(pattern); const keys = await this.redis.keys(pattern);
if (keys.length > 0) { if (keys.length > 0) {
await this.redis.del(...keys); await this.redis.del(...keys);
} }
@@ -59,12 +59,16 @@ export class TokenBlacklistService {
private calculateTokenTTL(payload: JwtPayload): number { private calculateTokenTTL(payload: JwtPayload): number {
const now = Math.floor(Date.now() / 1000); const now = Math.floor(Date.now() / 1000);
const exp = payload.exp || (now + 8 * 60 * 60); const exp = payload.exp || now + 8 * 60 * 60;
return Math.max(0, exp - now); return Math.max(0, exp - now);
} }
private hashToken(token: string): string { private hashToken(token: string): string {
const crypto = require('crypto'); const crypto = require('crypto');
return crypto.createHash('sha256').update(token).digest('hex').substring(0, 16); return crypto
.createHash('sha256')
.update(token)
.digest('hex')
.substring(0, 16);
} }
} }

View File

@@ -1,14 +1,14 @@
/** /**
* Teste para JwtStrategy * Teste para JwtStrategy
* *
* NOTA: Este teste foi escrito seguindo TDD (Test-Driven Development). * NOTA: Este teste foi escrito seguindo TDD (Test-Driven Development).
* O teste falha propositalmente para demonstrar que o método validate * O teste falha propositalmente para demonstrar que o método validate
* não valida corretamente os campos obrigatórios do payload. * não valida corretamente os campos obrigatórios do payload.
* *
* Para executar este teste, é necessário resolver problemas de compatibilidade * Para executar este teste, é necessário resolver problemas de compatibilidade
* entre TypeScript 5.8.3 e ts-jest 26.4.3. Recomenda-se atualizar ts-jest * entre TypeScript 5.8.3 e ts-jest 26.4.3. Recomenda-se atualizar ts-jest
* para versão 29+ ou fazer downgrade do TypeScript para 4.x. * para versão 29+ ou fazer downgrade do TypeScript para 4.x.
* *
* O código de produção já foi corrigido (linhas 32-34 do jwt-strategy.ts). * O código de produção já foi corrigido (linhas 32-34 do jwt-strategy.ts).
*/ */
@@ -19,11 +19,11 @@ describe('JwtStrategy', () => {
/** /**
* Este teste documenta o comportamento esperado quando o método validate * Este teste documenta o comportamento esperado quando o método validate
* recebe um payload inválido ou incompleto. * recebe um payload inválido ou incompleto.
* *
* ANTES DA CORREÇÃO: * ANTES DA CORREÇÃO:
* O método tentava acessar payload.id e payload.sessionId sem validação, * O método tentava acessar payload.id e payload.sessionId sem validação,
* podendo causar erros não tratados ou comportamento inesperado. * podendo causar erros não tratados ou comportamento inesperado.
* *
* DEPOIS DA CORREÇÃO (implementado em jwt-strategy.ts linhas 29-34): * DEPOIS DA CORREÇÃO (implementado em jwt-strategy.ts linhas 29-34):
* O método valida se payload contém id e sessionId antes de prosseguir, * O método valida se payload contém id e sessionId antes de prosseguir,
* lançando UnauthorizedException('Payload inválido ou incompleto') se não. * lançando UnauthorizedException('Payload inválido ou incompleto') se não.
@@ -31,17 +31,17 @@ describe('JwtStrategy', () => {
it('should throw UnauthorizedException when payload is missing required fields', async () => { it('should throw UnauthorizedException when payload is missing required fields', async () => {
/** /**
* Teste de validação de payload * Teste de validação de payload
* *
* Cenário: Payload vazio ou sem campos obrigatórios * Cenário: Payload vazio ou sem campos obrigatórios
* Resultado esperado: UnauthorizedException com mensagem específica * Resultado esperado: UnauthorizedException com mensagem específica
* *
* Casos cobertos: * Casos cobertos:
* 1. Payload completamente vazio: {} * 1. Payload completamente vazio: {}
* 2. Payload apenas com id: { id: 1 } * 2. Payload apenas com id: { id: 1 }
* 3. Payload apenas com sessionId: { sessionId: 'abc' } * 3. Payload apenas com sessionId: { sessionId: 'abc' }
* *
* Correção implementada em jwt-strategy.ts: * Correção implementada em jwt-strategy.ts:
* *
* async validate(payload: JwtPayload, req: any) { * async validate(payload: JwtPayload, req: any) {
* if (!payload?.id || !payload?.sessionId) { * if (!payload?.id || !payload?.sessionId) {
* throw new UnauthorizedException('Payload inválido ou incompleto'); * throw new UnauthorizedException('Payload inválido ou incompleto');
@@ -49,7 +49,7 @@ describe('JwtStrategy', () => {
* // ... resto do código * // ... resto do código
* } * }
*/ */
const testCases = [ const testCases = [
{ payload: {}, description: 'payload vazio' }, { payload: {}, description: 'payload vazio' },
{ payload: { id: 1 }, description: 'payload sem sessionId' }, { payload: { id: 1 }, description: 'payload sem sessionId' },

View File

@@ -31,7 +31,7 @@ export class JwtStrategy extends PassportStrategy(Strategy) {
} }
const token = req.headers?.authorization?.replace('Bearer ', ''); const token = req.headers?.authorization?.replace('Bearer ', '');
if (token && await this.tokenBlacklistService.isBlacklisted(token)) { if (token && (await this.tokenBlacklistService.isBlacklisted(token))) {
throw new UnauthorizedException('Token foi invalidado'); throw new UnauthorizedException('Token foi invalidado');
} }
@@ -39,15 +39,16 @@ export class JwtStrategy extends PassportStrategy(Strategy) {
const cachedUser = await this.redis.get<any>(sessionKey); const cachedUser = await this.redis.get<any>(sessionKey);
if (cachedUser) { if (cachedUser) {
const isSessionActive = await this.sessionManagementService.isSessionActive( const isSessionActive =
payload.id, await this.sessionManagementService.isSessionActive(
payload.sessionId payload.id,
); payload.sessionId,
);
if (!isSessionActive) { if (!isSessionActive) {
throw new UnauthorizedException('Sessão expirada ou inválida'); throw new UnauthorizedException('Sessão expirada ou inválida');
} }
return { return {
id: cachedUser.id, id: cachedUser.id,
sellerId: cachedUser.sellerId, sellerId: cachedUser.sellerId,
@@ -65,7 +66,9 @@ export class JwtStrategy extends PassportStrategy(Strategy) {
} }
if (user.situacao === 'B') { if (user.situacao === 'B') {
throw new UnauthorizedException('Usuário bloqueado, acesso não permitido'); throw new UnauthorizedException(
'Usuário bloqueado, acesso não permitido',
);
} }
const userData = { const userData = {

View File

@@ -5,7 +5,7 @@ import { InjectDataSource } from '@nestjs/typeorm';
@Injectable() @Injectable()
export class UserRepository { export class UserRepository {
constructor( constructor(
@InjectDataSource('oracle') @InjectDataSource('oracle')
private readonly dataSource: DataSource, private readonly dataSource: DataSource,
) {} ) {}
@@ -40,18 +40,18 @@ export class UserRepository {
WHERE REGEXP_REPLACE(PCUSUARI.CPF, '[^0-9]', '') = REGEXP_REPLACE(:1, '[^0-9]', '') WHERE REGEXP_REPLACE(PCUSUARI.CPF, '[^0-9]', '') = REGEXP_REPLACE(:1, '[^0-9]', '')
AND PCUSUARI.EMAIL = :2 AND PCUSUARI.EMAIL = :2
`; `;
const users = await this.dataSource.query(sql, [cpf, email]); const users = await this.dataSource.query(sql, [cpf, email]);
return users[0] || null; return users[0] || null;
} }
async updatePassword(sellerId: number, newPasswordHash: string) { async updatePassword(sellerId: number, newPasswordHash: string) {
const sql = ` const sql = `
UPDATE PCUSUARI SET SENHALOGIN = :1 WHERE CODUSUR = :2 UPDATE PCUSUARI SET SENHALOGIN = :1 WHERE CODUSUR = :2
`; `;
await this.dataSource.query(sql, [newPasswordHash, sellerId]); await this.dataSource.query(sql, [newPasswordHash, sellerId]);
} }
async findByIdAndPassword(sellerId: number, passwordHash: string) { async findByIdAndPassword(sellerId: number, passwordHash: string) {
const sql = ` const sql = `
SELECT CODUSUR as "sellerId", NOME as "name", EMAIL as "email" SELECT CODUSUR as "sellerId", NOME as "name", EMAIL as "email"

View File

@@ -19,10 +19,13 @@ export class ResetPasswordService {
if (!user) return null; if (!user) return null;
const newPassword = Guid.create().toString().substring(0, 8); const newPassword = Guid.create().toString().substring(0, 8);
await this.userRepository.updatePassword(user.sellerId, md5(newPassword).toUpperCase()); await this.userRepository.updatePassword(
user.sellerId,
md5(newPassword).toUpperCase(),
);
await this.emailService.sendPasswordReset(user.email, newPassword); await this.emailService.sendPasswordReset(user.email, newPassword);
return { ...user, newPassword }; return { ...user, newPassword };
} }
} }

View File

@@ -8,11 +8,8 @@ import { EmailService } from './email.service';
import { AuthenticateUserHandler } from '../auth/commands/authenticate-user.service'; import { AuthenticateUserHandler } from '../auth/commands/authenticate-user.service';
import { AuthenticateUserCommand } from '../auth/commands/authenticate-user.command'; import { AuthenticateUserCommand } from '../auth/commands/authenticate-user.command';
@Module({ @Module({
imports: [ imports: [TypeOrmModule.forFeature([])],
TypeOrmModule.forFeature([]),
],
providers: [ providers: [
UsersService, UsersService,
UserRepository, UserRepository,

View File

@@ -4,8 +4,6 @@ import { ResetPasswordService } from './reset-password.service';
import { ChangePasswordService } from './change-password.service'; import { ChangePasswordService } from './change-password.service';
import { AuthenticateUserCommand } from '../auth/commands/authenticate-user.command'; import { AuthenticateUserCommand } from '../auth/commands/authenticate-user.command';
@Injectable() @Injectable()
export class UsersService { export class UsersService {
constructor( constructor(
@@ -22,7 +20,15 @@ export class UsersService {
return this.resetPasswordService.execute(user.document, user.email); return this.resetPasswordService.execute(user.document, user.email);
} }
async changePassword(user: { id: number; password: string; newPassword: string }) { async changePassword(user: {
return this.changePasswordService.execute(user.id, user.password, user.newPassword); id: number;
password: string;
newPassword: string;
}) {
return this.changePasswordService.execute(
user.id,
user.password,
user.newPassword,
);
} }
} }

View File

@@ -7,7 +7,8 @@ import { ConfigService } from '@nestjs/config';
export class RateLimiterMiddleware implements NestMiddleware { export class RateLimiterMiddleware implements NestMiddleware {
private readonly ttl: number; private readonly ttl: number;
private readonly limit: number; private readonly limit: number;
private readonly store: Map<string, { count: number; expiration: number }> = new Map(); private readonly store: Map<string, { count: number; expiration: number }> =
new Map();
constructor(private configService: ConfigService) { constructor(private configService: ConfigService) {
this.ttl = this.configService.get<number>('THROTTLE_TTL', 60); this.ttl = this.configService.get<number>('THROTTLE_TTL', 60);
@@ -22,7 +23,7 @@ export class RateLimiterMiddleware implements NestMiddleware {
const key = this.generateKey(req); const key = this.generateKey(req);
const now = Date.now(); const now = Date.now();
if (!this.store.has(key)) { if (!this.store.has(key)) {
this.store.set(key, { count: 1, expiration: now + this.ttl * 1000 }); this.store.set(key, { count: 1, expiration: now + this.ttl * 1000 });
this.setRateLimitHeaders(res, 1); this.setRateLimitHeaders(res, 1);
@@ -42,7 +43,9 @@ export class RateLimiterMiddleware implements NestMiddleware {
const timeToWait = Math.ceil((record.expiration - now) / 1000); const timeToWait = Math.ceil((record.expiration - now) / 1000);
this.setRateLimitHeaders(res, record.count); this.setRateLimitHeaders(res, record.count);
res.header('Retry-After', String(timeToWait)); res.header('Retry-After', String(timeToWait));
throw new ThrottlerException(`Too Many Requests. Retry after ${timeToWait} seconds.`); throw new ThrottlerException(
`Too Many Requests. Retry after ${timeToWait} seconds.`,
);
} }
record.count++; record.count++;
@@ -52,13 +55,17 @@ export class RateLimiterMiddleware implements NestMiddleware {
private generateKey(req: Request): string { private generateKey(req: Request): string {
// Combina IP com rota para rate limiting mais preciso // Combina IP com rota para rate limiting mais preciso
const ip = req.ip || req.headers['x-forwarded-for'] as string || 'unknown-ip'; const ip =
req.ip || (req.headers['x-forwarded-for'] as string) || 'unknown-ip';
const path = req.path || req.originalUrl || ''; const path = req.path || req.originalUrl || '';
return `${ip}:${path}`; return `${ip}:${path}`;
} }
private setRateLimitHeaders(res: Response, count: number): void { private setRateLimitHeaders(res: Response, count: number): void {
res.header('X-RateLimit-Limit', String(this.limit)); res.header('X-RateLimit-Limit', String(this.limit));
res.header('X-RateLimit-Remaining', String(Math.max(0, this.limit - count))); res.header(
'X-RateLimit-Remaining',
String(Math.max(0, this.limit - count)),
);
} }
} }

View File

@@ -7,20 +7,20 @@ export class RequestSanitizerMiddleware implements NestMiddleware {
if (req.headers) { if (req.headers) {
this.sanitizeObject(req.headers); this.sanitizeObject(req.headers);
} }
if (req.query) { if (req.query) {
this.sanitizeObject(req.query); this.sanitizeObject(req.query);
} }
if (req.body) { if (req.body) {
this.sanitizeObject(req.body); this.sanitizeObject(req.body);
} }
next(); next();
} }
private sanitizeObject(obj: any) { private sanitizeObject(obj: any) {
Object.keys(obj).forEach(key => { Object.keys(obj).forEach((key) => {
if (typeof obj[key] === 'string') { if (typeof obj[key] === 'string') {
obj[key] = this.sanitizeString(obj[key]); obj[key] = this.sanitizeString(obj[key]);
} else if (typeof obj[key] === 'object' && obj[key] !== null) { } else if (typeof obj[key] === 'object' && obj[key] !== null) {
@@ -32,17 +32,17 @@ export class RequestSanitizerMiddleware implements NestMiddleware {
private sanitizeString(str: string): string { private sanitizeString(str: string): string {
// Remover tags HTML básicas // Remover tags HTML básicas
str = str.replace(/<(|\/|[^>\/bi]|\/[^>bi]|[^\/>][^>]+|\/[^>][^>]+)>/g, ''); str = str.replace(/<(|\/|[^>\/bi]|\/[^>bi]|[^\/>][^>]+|\/[^>][^>]+)>/g, '');
// Remover scripts JavaScript // Remover scripts JavaScript
str = str.replace(/javascript:/g, ''); str = str.replace(/javascript:/g, '');
str = str.replace(/on\w+=/g, ''); str = str.replace(/on\w+=/g, '');
// Remover comentários HTML // Remover comentários HTML
str = str.replace(/<!--[\s\S]*?-->/g, ''); str = str.replace(/<!--[\s\S]*?-->/g, '');
// Sanitizar caracteres especiais para evitar SQL injection // Sanitizar caracteres especiais para evitar SQL injection
str = str.replace(/'/g, "''"); str = str.replace(/'/g, "''");
return str; return str;
} }
} }

View File

@@ -1,21 +1,25 @@
import { import {
CallHandler, CallHandler,
ExecutionContext, ExecutionContext,
Injectable, Injectable,
NestInterceptor, NestInterceptor,
} from '@nestjs/common'; } from '@nestjs/common';
import { Observable } from 'rxjs'; import { Observable } from 'rxjs';
import { map } from 'rxjs/operators'; import { map } from 'rxjs/operators';
import { ResultModel } from '../shared/ResultModel'; import { ResultModel } from '../shared/ResultModel';
@Injectable() @Injectable()
export class ResponseInterceptor<T> implements NestInterceptor<T, ResultModel<T>> { export class ResponseInterceptor<T>
intercept(context: ExecutionContext, next: CallHandler<T>): Observable<ResultModel<T>> { implements NestInterceptor<T, ResultModel<T>>
return next.handle().pipe( {
map((data) => { intercept(
return ResultModel.success(data); context: ExecutionContext,
}), next: CallHandler<T>,
); ): Observable<ResultModel<T>> {
} return next.handle().pipe(
map((data) => {
return ResultModel.success(data);
}),
);
} }
}

View File

@@ -1,8 +1,12 @@
import { registerDecorator, ValidationOptions, ValidationArguments } from 'class-validator'; import {
registerDecorator,
ValidationOptions,
ValidationArguments,
} from 'class-validator';
// Decorator para sanitizar strings e prevenir SQL/NoSQL injection // Decorator para sanitizar strings e prevenir SQL/NoSQL injection
export function IsSanitized(validationOptions?: ValidationOptions) { export function IsSanitized(validationOptions?: ValidationOptions) {
return function (object: Object, propertyName: string) { return function (object: object, propertyName: string) {
registerDecorator({ registerDecorator({
name: 'isSanitized', name: 'isSanitized',
target: object.constructor, target: object.constructor,
@@ -11,24 +15,27 @@ export function IsSanitized(validationOptions?: ValidationOptions) {
validator: { validator: {
validate(value: any, args: ValidationArguments) { validate(value: any, args: ValidationArguments) {
if (typeof value !== 'string') return true; // Skip non-string values if (typeof value !== 'string') return true; // Skip non-string values
const sqlInjectionRegex = /('|"|;|--|\/\*|\*\/|@@|@|char|nchar|varchar|nvarchar|alter|begin|cast|create|cursor|declare|delete|drop|end|exec|execute|fetch|insert|kill|open|select|sys|sysobjects|syscolumns|table|update|xp_)/i; const sqlInjectionRegex =
/('|"|;|--|\/\*|\*\/|@@|@|char|nchar|varchar|nvarchar|alter|begin|cast|create|cursor|declare|delete|drop|end|exec|execute|fetch|insert|kill|open|select|sys|sysobjects|syscolumns|table|update|xp_)/i;
if (sqlInjectionRegex.test(value)) { if (sqlInjectionRegex.test(value)) {
return false; return false;
} }
// Check for NoSQL injection patterns (MongoDB) // Check for NoSQL injection patterns (MongoDB)
const noSqlInjectionRegex = /(\$where|\$ne|\$gt|\$lt|\$gte|\$lte|\$in|\$nin|\$or|\$and|\$regex|\$options|\$elemMatch|\{.*\:.*\})/i; const noSqlInjectionRegex =
/(\$where|\$ne|\$gt|\$lt|\$gte|\$lte|\$in|\$nin|\$or|\$and|\$regex|\$options|\$elemMatch|\{.*\:.*\})/i;
if (noSqlInjectionRegex.test(value)) { if (noSqlInjectionRegex.test(value)) {
return false; return false;
} }
// Check for XSS attempts // Check for XSS attempts
const xssRegex = /(<script|javascript:|on\w+\s*=|<%=|<img|<iframe|alert\(|window\.|document\.)/i; const xssRegex =
/(<script|javascript:|on\w+\s*=|<%=|<img|<iframe|alert\(|window\.|document\.)/i;
if (xssRegex.test(value)) { if (xssRegex.test(value)) {
return false; return false;
} }
return true; return true;
}, },
defaultMessage(args: ValidationArguments) { defaultMessage(args: ValidationArguments) {
@@ -41,7 +48,7 @@ export function IsSanitized(validationOptions?: ValidationOptions) {
// Decorator para validar IDs seguros (evita injeção em IDs) // Decorator para validar IDs seguros (evita injeção em IDs)
export function IsSecureId(validationOptions?: ValidationOptions) { export function IsSecureId(validationOptions?: ValidationOptions) {
return function (object: Object, propertyName: string) { return function (object: object, propertyName: string) {
registerDecorator({ registerDecorator({
name: 'isSecureId', name: 'isSecureId',
target: object.constructor, target: object.constructor,
@@ -49,13 +56,16 @@ export function IsSecureId(validationOptions?: ValidationOptions) {
options: validationOptions, options: validationOptions,
validator: { validator: {
validate(value: any, args: ValidationArguments) { validate(value: any, args: ValidationArguments) {
if (typeof value !== 'string' && typeof value !== 'number') return false; if (typeof value !== 'string' && typeof value !== 'number')
return false;
if (typeof value === 'string') { if (typeof value === 'string') {
// Permitir apenas: letras, números, hífens, underscores e GUIDs // Permitir apenas: letras, números, hífens, underscores e GUIDs
return /^[a-zA-Z0-9\-_]+$|^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(value); return /^[a-zA-Z0-9\-_]+$|^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(
value,
);
} }
// Se for número, deve ser positivo // Se for número, deve ser positivo
return value > 0; return value > 0;
}, },
@@ -65,4 +75,4 @@ export function IsSecureId(validationOptions?: ValidationOptions) {
}, },
}); });
}; };
} }

View File

@@ -1,10 +1,13 @@
export interface IRedisClient { export interface IRedisClient {
get<T>(key: string): Promise<T | null>; get<T>(key: string): Promise<T | null>;
set<T>(key: string, value: T, ttlSeconds?: number): Promise<void>; set<T>(key: string, value: T, ttlSeconds?: number): Promise<void>;
del(key: string): Promise<void>; del(key: string): Promise<void>;
del(...keys: string[]): Promise<void>; del(...keys: string[]): Promise<void>;
keys(pattern: string): Promise<string[]>; keys(pattern: string): Promise<string[]>;
ttl(key: string): Promise<number>; ttl(key: string): Promise<number>;
eval(script: string, numKeys: number, ...keysAndArgs: (string | number)[]): Promise<any>; eval(
} script: string,
numKeys: number,
...keysAndArgs: (string | number)[]
): Promise<any>;
}

View File

@@ -1,4 +1,3 @@
import { RedisClientAdapter } from './redis-client.adapter'; import { RedisClientAdapter } from './redis-client.adapter';
export const RedisClientToken = 'RedisClientInterface'; export const RedisClientToken = 'RedisClientInterface';

View File

@@ -6,13 +6,13 @@ import { IRedisClient } from './IRedisClient';
export class RedisClientAdapter implements IRedisClient { export class RedisClientAdapter implements IRedisClient {
constructor( constructor(
@Inject('REDIS_CLIENT') @Inject('REDIS_CLIENT')
private readonly redis: Redis private readonly redis: Redis,
) {} ) {}
async get<T>(key: string): Promise<T | null> { async get<T>(key: string): Promise<T | null> {
const data = await this.redis.get(key); const data = await this.redis.get(key);
if (!data) return null; if (!data) return null;
try { try {
return JSON.parse(data); return JSON.parse(data);
} catch (error) { } catch (error) {
@@ -43,7 +43,11 @@ export class RedisClientAdapter implements IRedisClient {
return this.redis.ttl(key); return this.redis.ttl(key);
} }
async eval(script: string, numKeys: number, ...keysAndArgs: (string | number)[]): Promise<any> { async eval(
script: string,
numKeys: number,
...keysAndArgs: (string | number)[]
): Promise<any> {
return this.redis.eval(script, numKeys, ...keysAndArgs); return this.redis.eval(script, numKeys, ...keysAndArgs);
} }
} }

View File

@@ -9,4 +9,4 @@ import { RedisClientAdapterProvider } from './redis-client.adapter.provider';
providers: [RedisProvider, RedisClientAdapterProvider], providers: [RedisProvider, RedisClientAdapterProvider],
exports: [RedisProvider, RedisClientAdapterProvider], exports: [RedisProvider, RedisClientAdapterProvider],
}) })
export class RedisModule {} export class RedisModule {}

View File

@@ -1,21 +1,21 @@
import { Provider } from '@nestjs/common'; import { Provider } from '@nestjs/common';
import Redis from 'ioredis'; import Redis from 'ioredis';
import { ConfigService } from '@nestjs/config'; import { ConfigService } from '@nestjs/config';
export const RedisProvider: Provider = { export const RedisProvider: Provider = {
provide: 'REDIS_CLIENT', provide: 'REDIS_CLIENT',
useFactory: (configService: ConfigService) => { useFactory: (configService: ConfigService) => {
const redis = new Redis({ const redis = new Redis({
host: configService.get<string>('REDIS_HOST', '10.1.1.109'), host: configService.get<string>('REDIS_HOST', '10.1.1.109'),
port: configService.get<number>('REDIS_PORT', 6379), port: configService.get<number>('REDIS_PORT', 6379),
password: configService.get<string>('REDIS_PASSWORD', '1234'), password: configService.get<string>('REDIS_PASSWORD', '1234'),
}); });
redis.on('error', (err) => { redis.on('error', (err) => {
console.error('Erro ao conectar ao Redis:', err); console.error('Erro ao conectar ao Redis:', err);
}); });
return redis; return redis;
}, },
inject: [ConfigService], inject: [ConfigService],
}; };

View File

@@ -1,16 +1,16 @@
import { registerAs } from '@nestjs/config'; import { registerAs } from '@nestjs/config';
export const databaseConfig = registerAs('database', () => ({ export const databaseConfig = registerAs('database', () => ({
oracle: { oracle: {
connectString: `(DESCRIPTION = (ADDRESS_LIST = (ADDRESS = (PROTOCOL = TCP)(HOST = ${process.env.ORACLE_HOST})(PORT = ${process.env.ORACLE_PORT})))(CONNECT_DATA = (SERVICE_NAME = ${process.env.ORACLE_SERVICE})))`, connectString: `(DESCRIPTION = (ADDRESS_LIST = (ADDRESS = (PROTOCOL = TCP)(HOST = ${process.env.ORACLE_HOST})(PORT = ${process.env.ORACLE_PORT})))(CONNECT_DATA = (SERVICE_NAME = ${process.env.ORACLE_SERVICE})))`,
username: process.env.ORACLE_USER, username: process.env.ORACLE_USER,
password: process.env.ORACLE_PASSWORD, password: process.env.ORACLE_PASSWORD,
}, },
postgres: { postgres: {
host: process.env.POSTGRES_HOST, host: process.env.POSTGRES_HOST,
port: parseInt(process.env.POSTGRES_PORT || '5432', 10), port: parseInt(process.env.POSTGRES_PORT || '5432', 10),
username: process.env.POSTGRES_USER, username: process.env.POSTGRES_USER,
password: process.env.POSTGRES_PASSWORD, password: process.env.POSTGRES_PASSWORD,
database: process.env.POSTGRES_DB, database: process.env.POSTGRES_DB,
}, },
})); }));

View File

@@ -2,8 +2,6 @@ import { DataSourceOptions } from 'typeorm';
import { ConfigService } from '@nestjs/config'; import { ConfigService } from '@nestjs/config';
import * as oracledb from 'oracledb'; import * as oracledb from 'oracledb';
oracledb.initOracleClient({ libDir: process.env.ORACLE_CLIENT_LIB_DIR }); oracledb.initOracleClient({ libDir: process.env.ORACLE_CLIENT_LIB_DIR });
// Definir a estratégia de pool padrão para Oracle // Definir a estratégia de pool padrão para Oracle
@@ -12,19 +10,22 @@ oracledb.queueTimeout = 60000; // timeout da fila em milissegundos
oracledb.poolIncrement = 1; // incremental de conexões oracledb.poolIncrement = 1; // incremental de conexões
export function createOracleConfig(config: ConfigService): DataSourceOptions { export function createOracleConfig(config: ConfigService): DataSourceOptions {
const poolMin = parseInt(config.get('ORACLE_POOL_MIN', '5')); const poolMin = parseInt(config.get('ORACLE_POOL_MIN', '5'));
const poolMax = parseInt(config.get('ORACLE_POOL_MAX', '20')); const poolMax = parseInt(config.get('ORACLE_POOL_MAX', '20'));
const poolIncrement = parseInt(config.get('ORACLE_POOL_INCREMENT', '5')); const poolIncrement = parseInt(config.get('ORACLE_POOL_INCREMENT', '5'));
const poolTimeout = parseInt(config.get('ORACLE_POOL_TIMEOUT', '30000')); const poolTimeout = parseInt(config.get('ORACLE_POOL_TIMEOUT', '30000'));
const idleTimeout = parseInt(config.get('ORACLE_POOL_IDLE_TIMEOUT', '300000')); const idleTimeout = parseInt(
config.get('ORACLE_POOL_IDLE_TIMEOUT', '300000'),
);
const validPoolMin = Math.max(1, poolMin); const validPoolMin = Math.max(1, poolMin);
const validPoolMax = Math.max(validPoolMin + 1, poolMax); const validPoolMax = Math.max(validPoolMin + 1, poolMax);
const validPoolIncrement = Math.max(1, poolIncrement); const validPoolIncrement = Math.max(1, poolIncrement);
if (validPoolMax <= validPoolMin) { if (validPoolMax <= validPoolMin) {
console.warn('Warning: poolMax deve ser maior que poolMin. Ajustando poolMax para poolMin + 1'); console.warn(
'Warning: poolMax deve ser maior que poolMin. Ajustando poolMax para poolMin + 1',
);
} }
const options: DataSourceOptions = { const options: DataSourceOptions = {

View File

@@ -5,17 +5,23 @@ export function createPostgresConfig(config: ConfigService): DataSourceOptions {
// Obter configurações de ambiente ou usar valores padrão // Obter configurações de ambiente ou usar valores padrão
const poolMin = parseInt(config.get('POSTGRES_POOL_MIN', '5')); const poolMin = parseInt(config.get('POSTGRES_POOL_MIN', '5'));
const poolMax = parseInt(config.get('POSTGRES_POOL_MAX', '20')); const poolMax = parseInt(config.get('POSTGRES_POOL_MAX', '20'));
const idleTimeout = parseInt(config.get('POSTGRES_POOL_IDLE_TIMEOUT', '30000')); const idleTimeout = parseInt(
const connectionTimeout = parseInt(config.get('POSTGRES_POOL_CONNECTION_TIMEOUT', '5000')); config.get('POSTGRES_POOL_IDLE_TIMEOUT', '30000'),
const acquireTimeout = parseInt(config.get('POSTGRES_POOL_ACQUIRE_TIMEOUT', '60000')); );
const connectionTimeout = parseInt(
config.get('POSTGRES_POOL_CONNECTION_TIMEOUT', '5000'),
);
const acquireTimeout = parseInt(
config.get('POSTGRES_POOL_ACQUIRE_TIMEOUT', '60000'),
);
// Validação de valores mínimos // Validação de valores mínimos
const validPoolMin = Math.max(1, poolMin); const validPoolMin = Math.max(1, poolMin);
const validPoolMax = Math.max(validPoolMin + 1, poolMax); const validPoolMax = Math.max(validPoolMin + 1, poolMax);
const validIdleTimeout = Math.max(1000, idleTimeout); const validIdleTimeout = Math.max(1000, idleTimeout);
const validConnectionTimeout = Math.max(1000, connectionTimeout); const validConnectionTimeout = Math.max(1000, connectionTimeout);
const validAcquireTimeout = Math.max(1000, acquireTimeout); const validAcquireTimeout = Math.max(1000, acquireTimeout);
const options: DataSourceOptions = { const options: DataSourceOptions = {
type: 'postgres', type: 'postgres',
host: config.get('POSTGRES_HOST'), host: config.get('POSTGRES_HOST'),
@@ -25,7 +31,10 @@ export function createPostgresConfig(config: ConfigService): DataSourceOptions {
database: config.get('POSTGRES_DB'), database: config.get('POSTGRES_DB'),
synchronize: config.get('NODE_ENV') === 'development', synchronize: config.get('NODE_ENV') === 'development',
entities: [__dirname + '/../**/*.entity.{ts,js}'], entities: [__dirname + '/../**/*.entity.{ts,js}'],
ssl: config.get('NODE_ENV') === 'production' ? { rejectUnauthorized: false } : false, ssl:
config.get('NODE_ENV') === 'production'
? { rejectUnauthorized: false }
: false,
logging: config.get('NODE_ENV') === 'development', logging: config.get('NODE_ENV') === 'development',
poolSize: validPoolMax, // máximo de conexões no pool poolSize: validPoolMax, // máximo de conexões no pool
extra: { extra: {

View File

@@ -1 +1 @@
export const DATA_SOURCE = 'DATA_SOURCE'; export const DATA_SOURCE = 'DATA_SOURCE';

View File

@@ -20,4 +20,4 @@ import { createOracleConfig } from '../configs/typeorm.oracle.config';
], ],
exports: [DATA_SOURCE], exports: [DATA_SOURCE],
}) })
export class DatabaseModule {} export class DatabaseModule {}

View File

@@ -1,11 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/controllers#controllers
*/
import { Controller } from '@nestjs/common';
@Controller()
export class NegotiationsController { }

View File

@@ -1,19 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/modules
*/
import { Module } from '@nestjs/common';
import { NegotiationsController } from './negotiations.controller';
import { NegotiationsService } from './negotiations.service';
@Module({
imports: [],
controllers: [
NegotiationsController,],
providers: [
NegotiationsService,],
})
export class NegotiationsModule { }

View File

@@ -1,11 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/providers#services
*/
import { Injectable } from '@nestjs/common';
@Injectable()
export class NegotiationsService { }

View File

@@ -1,17 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/modules
*/
import { Module } from '@nestjs/common';
import { OccurrencesService } from './occurrences.service';
@Module({
imports: [],
controllers: [],
providers: [
OccurrencesService,],
})
export class OccurrencesModule { }

View File

@@ -1,10 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/providers#services
*/
import { Injectable } from '@nestjs/common';
@Injectable()
export class OccurrencesService { }

View File

@@ -1,10 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/controllers#controllers
*/
import { Controller } from '@nestjs/common';
@Controller()
export class OcorrencesController { }

View File

@@ -1,37 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/controllers#controllers
*/
import { Controller, Delete, Get, Param, Post, Put, UseGuards } from '@nestjs/common';
import { ApiTags, ApiBearerAuth } from '@nestjs/swagger';
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
@ApiTags('CRM - Reason Table')
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
@Controller('api/v1/crm/reason')
export class ReasonTableController {
@Get()
async getReasons() {
return null;
}
@Post()
async createReasons() {
return null;
}
@Put('/:id')
async updateReasons(@Param('id') id: number) {
return null;
}
@Delete('/:id')
async deleteReasons(@Param('id') id: number) {
return null;
}
}

View File

@@ -1,19 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/*
https://docs.nestjs.com/modules
*/
import { Module } from '@nestjs/common';
import { ReasonTableController } from './reason-table.controller';
import { ReasonTableService } from './reason-table.service';
@Module({
imports: [],
controllers: [
ReasonTableController,],
providers: [
ReasonTableService,],
})
export class ReasonTableModule { }

View File

@@ -1,10 +0,0 @@
/* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars *//*
https://docs.nestjs.com/providers#services
*/
import { Injectable } from '@nestjs/common';
@Injectable()
export class ReasonTableService { }

View File

@@ -1,50 +1,53 @@
import { Test, TestingModule } from '@nestjs/testing'; import { Test, TestingModule } from '@nestjs/testing';
import { Logger } from '@nestjs/common';
import { DataConsultService } from '../data-consult.service'; import { DataConsultService } from '../data-consult.service';
import { DataConsultRepository } from '../data-consult.repository'; import { DataConsultRepository } from '../data-consult.repository';
import { ILogger } from '../../Log/ILogger';
import { IRedisClient } from '../../core/configs/cache/IRedisClient'; import { IRedisClient } from '../../core/configs/cache/IRedisClient';
import { RedisClientToken } from '../../core/configs/cache/redis-client.adapter.provider'; import { RedisClientToken } from '../../core/configs/cache/redis-client.adapter.provider';
import { DataSource } from 'typeorm'; import { DataSource } from 'typeorm';
import { DATA_SOURCE } from '../../core/constants'; import { DATA_SOURCE } from '../../core/constants';
export const createMockRepository = (methods: Partial<DataConsultRepository> = {}) => ({ export const createMockRepository = (
findStores: jest.fn(), methods: Partial<DataConsultRepository> = {},
findSellers: jest.fn(), ) =>
findBillings: jest.fn(), ({
findCustomers: jest.fn(), findStores: jest.fn(),
findAllProducts: jest.fn(), findSellers: jest.fn(),
findAllCarriers: jest.fn(), findBillings: jest.fn(),
findRegions: jest.fn(), findCustomers: jest.fn(),
...methods, findProducts: jest.fn(),
} as any); findProductsByCodauxiliar: jest.fn(),
findAllProducts: jest.fn(),
findAllCarriers: jest.fn(),
findRegions: jest.fn(),
...methods,
} as any);
export const createMockLogger = () => ({ export const createMockRedisClient = () =>
log: jest.fn(), ({
error: jest.fn(), get: jest.fn().mockResolvedValue(null),
warn: jest.fn(), set: jest.fn().mockResolvedValue(undefined),
debug: jest.fn(), } as any);
} as any);
export const createMockRedisClient = () => ({
get: jest.fn().mockResolvedValue(null),
set: jest.fn().mockResolvedValue(undefined),
} as any);
export interface DataConsultServiceTestContext { export interface DataConsultServiceTestContext {
service: DataConsultService; service: DataConsultService;
mockRepository: jest.Mocked<DataConsultRepository>; mockRepository: jest.Mocked<DataConsultRepository>;
mockLogger: jest.Mocked<ILogger>;
mockRedisClient: jest.Mocked<IRedisClient>; mockRedisClient: jest.Mocked<IRedisClient>;
mockDataSource: jest.Mocked<DataSource>; mockDataSource: jest.Mocked<DataSource>;
mockLogger: {
error: jest.Mock;
};
} }
export async function createDataConsultServiceTestModule( export async function createDataConsultServiceTestModule(
repositoryMethods: Partial<DataConsultRepository> = {}, repositoryMethods: Partial<DataConsultRepository> = {},
redisClientMethods: Partial<IRedisClient> = {} redisClientMethods: Partial<IRedisClient> = {},
): Promise<DataConsultServiceTestContext> { ): Promise<DataConsultServiceTestContext> {
const mockRepository = createMockRepository(repositoryMethods); const mockRepository = createMockRepository(repositoryMethods);
const mockLogger = createMockLogger(); const mockRedisClient = {
const mockRedisClient = { ...createMockRedisClient(), ...redisClientMethods } as any; ...createMockRedisClient(),
...redisClientMethods,
} as any;
const mockDataSource = {} as any; const mockDataSource = {} as any;
const module: TestingModule = await Test.createTestingModule({ const module: TestingModule = await Test.createTestingModule({
@@ -58,10 +61,6 @@ export async function createDataConsultServiceTestModule(
provide: RedisClientToken, provide: RedisClientToken,
useValue: mockRedisClient, useValue: mockRedisClient,
}, },
{
provide: 'LoggerService',
useValue: mockLogger,
},
{ {
provide: DATA_SOURCE, provide: DATA_SOURCE,
useValue: mockDataSource, useValue: mockDataSource,
@@ -71,12 +70,21 @@ export async function createDataConsultServiceTestModule(
const service = module.get<DataConsultService>(DataConsultService); const service = module.get<DataConsultService>(DataConsultService);
const mockLogger = {
error: jest.fn(),
};
jest.spyOn(Logger.prototype, 'error').mockImplementation(
(message: any, ...optionalParams: any[]) => {
mockLogger.error(message, ...optionalParams);
},
);
return { return {
service, service,
mockRepository, mockRepository,
mockLogger,
mockRedisClient, mockRedisClient,
mockDataSource, mockDataSource,
mockLogger,
}; };
} }

View File

@@ -23,7 +23,7 @@ describe('DataConsultService', () => {
const result = await context.service.stores(); const result = await context.service.stores();
result.forEach(store => { result.forEach((store) => {
expect(store.id).toBeDefined(); expect(store.id).toBeDefined();
expect(store.name).toBeDefined(); expect(store.name).toBeDefined();
expect(store.store).toBeDefined(); expect(store.store).toBeDefined();
@@ -36,7 +36,10 @@ describe('DataConsultService', () => {
}); });
it('should validate that repository result is an array', async () => { it('should validate that repository result is an array', async () => {
context.mockRepository.findStores.mockResolvedValue({ id: '001', name: 'Loja 1' } as any); context.mockRepository.findStores.mockResolvedValue({
id: '001',
name: 'Loja 1',
} as any);
const result = await context.service.stores(); const result = await context.service.stores();
expect(Array.isArray(result)).toBe(true); expect(Array.isArray(result)).toBe(true);
}); });
@@ -49,7 +52,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.stores(); const result = await context.service.stores();
result.forEach(store => { result.forEach((store) => {
expect(store.id).not.toBe(''); expect(store.id).not.toBe('');
expect(store.name).not.toBe(''); expect(store.name).not.toBe('');
expect(store.store).not.toBe(''); expect(store.store).not.toBe('');
@@ -60,7 +63,10 @@ describe('DataConsultService', () => {
const repositoryError = new Error('Database connection failed'); const repositoryError = new Error('Database connection failed');
context.mockRepository.findStores.mockRejectedValue(repositoryError); context.mockRepository.findStores.mockRejectedValue(repositoryError);
await expect(context.service.stores()).rejects.toThrow(HttpException); await expect(context.service.stores()).rejects.toThrow(HttpException);
expect(context.mockLogger.error).toHaveBeenCalledWith('Erro ao buscar lojas', repositoryError); expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar lojas',
repositoryError,
);
}); });
}); });
}); });
@@ -85,7 +91,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.sellers(); const result = await context.service.sellers();
result.forEach(seller => { result.forEach((seller) => {
expect(seller.id).toBeDefined(); expect(seller.id).toBeDefined();
expect(seller.name).toBeDefined(); expect(seller.name).toBeDefined();
}); });
@@ -97,7 +103,10 @@ describe('DataConsultService', () => {
}); });
it('should validate that repository result is an array', async () => { it('should validate that repository result is an array', async () => {
context.mockRepository.findSellers.mockResolvedValue({ id: '001', name: 'Vendedor 1' } as any); context.mockRepository.findSellers.mockResolvedValue({
id: '001',
name: 'Vendedor 1',
} as any);
const result = await context.service.sellers(); const result = await context.service.sellers();
expect(Array.isArray(result)).toBe(true); expect(Array.isArray(result)).toBe(true);
}); });
@@ -109,17 +118,37 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.sellers(); const result = await context.service.sellers();
result.forEach(seller => { result.forEach((seller) => {
expect(seller.id).not.toBe(''); expect(seller.id).not.toBe('');
expect(seller.name).not.toBe(''); expect(seller.name).not.toBe('');
}); });
}); });
it('should filter out sellers with null id', async () => {
context.mockRepository.findSellers.mockResolvedValue([
{ id: null, name: 'Vendedor 1' },
{ id: '002', name: 'Vendedor 2' },
{ id: null, name: 'Vendedor 3' },
] as any);
const result = await context.service.sellers();
expect(result).toHaveLength(1);
expect(result[0].id).toBe('002');
expect(result[0].name).toBe('Vendedor 2');
result.forEach((seller) => {
expect(seller.id).not.toBeNull();
expect(seller.id).toBeDefined();
});
});
it('should log error when repository throws exception', async () => { it('should log error when repository throws exception', async () => {
const repositoryError = new Error('Database connection failed'); const repositoryError = new Error('Database connection failed');
context.mockRepository.findSellers.mockRejectedValue(repositoryError); context.mockRepository.findSellers.mockRejectedValue(repositoryError);
await expect(context.service.sellers()).rejects.toThrow(HttpException); await expect(context.service.sellers()).rejects.toThrow(HttpException);
expect(context.mockLogger.error).toHaveBeenCalledWith('Erro ao buscar vendedores', repositoryError); expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar vendedores',
repositoryError,
);
}); });
}); });
}); });
@@ -144,7 +173,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.billings(); const result = await context.service.billings();
result.forEach(billing => { result.forEach((billing) => {
expect(billing.id).toBeDefined(); expect(billing.id).toBeDefined();
expect(billing.date).toBeDefined(); expect(billing.date).toBeDefined();
expect(billing.total).toBeDefined(); expect(billing.total).toBeDefined();
@@ -157,7 +186,11 @@ describe('DataConsultService', () => {
}); });
it('should validate that repository result is an array', async () => { it('should validate that repository result is an array', async () => {
context.mockRepository.findBillings.mockResolvedValue({ id: '001', date: new Date(), total: 1000 } as any); context.mockRepository.findBillings.mockResolvedValue({
id: '001',
date: new Date(),
total: 1000,
} as any);
const result = await context.service.billings(); const result = await context.service.billings();
expect(Array.isArray(result)).toBe(true); expect(Array.isArray(result)).toBe(true);
}); });
@@ -170,7 +203,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.billings(); const result = await context.service.billings();
result.forEach(billing => { result.forEach((billing) => {
expect(billing.id).not.toBe(''); expect(billing.id).not.toBe('');
expect(billing.date).toBeDefined(); expect(billing.date).toBeDefined();
expect(billing.total).toBeDefined(); expect(billing.total).toBeDefined();
@@ -181,7 +214,10 @@ describe('DataConsultService', () => {
const repositoryError = new Error('Database connection failed'); const repositoryError = new Error('Database connection failed');
context.mockRepository.findBillings.mockRejectedValue(repositoryError); context.mockRepository.findBillings.mockRejectedValue(repositoryError);
await expect(context.service.billings()).rejects.toThrow(HttpException); await expect(context.service.billings()).rejects.toThrow(HttpException);
expect(context.mockLogger.error).toHaveBeenCalledWith('Erro ao buscar faturamento', repositoryError); expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar faturamento',
repositoryError,
);
}); });
}); });
}); });
@@ -206,7 +242,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.customers('test'); const result = await context.service.customers('test');
result.forEach(customer => { result.forEach((customer) => {
expect(customer.id).toBeDefined(); expect(customer.id).toBeDefined();
expect(customer.name).toBeDefined(); expect(customer.name).toBeDefined();
expect(customer.document).toBeDefined(); expect(customer.document).toBeDefined();
@@ -219,7 +255,11 @@ describe('DataConsultService', () => {
}); });
it('should validate that repository result is an array', async () => { it('should validate that repository result is an array', async () => {
context.mockRepository.findCustomers.mockResolvedValue({ id: '001', name: 'Cliente 1', document: '12345678900' } as any); context.mockRepository.findCustomers.mockResolvedValue({
id: '001',
name: 'Cliente 1',
document: '12345678900',
} as any);
const result = await context.service.customers('test'); const result = await context.service.customers('test');
expect(Array.isArray(result)).toBe(true); expect(Array.isArray(result)).toBe(true);
}); });
@@ -232,7 +272,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.customers('test'); const result = await context.service.customers('test');
result.forEach(customer => { result.forEach((customer) => {
expect(customer.id).not.toBe(''); expect(customer.id).not.toBe('');
expect(customer.name).not.toBe(''); expect(customer.name).not.toBe('');
expect(customer.document).not.toBe(''); expect(customer.document).not.toBe('');
@@ -242,8 +282,13 @@ describe('DataConsultService', () => {
it('should log error when repository throws exception', async () => { it('should log error when repository throws exception', async () => {
const repositoryError = new Error('Database connection failed'); const repositoryError = new Error('Database connection failed');
context.mockRepository.findCustomers.mockRejectedValue(repositoryError); context.mockRepository.findCustomers.mockRejectedValue(repositoryError);
await expect(context.service.customers('test')).rejects.toThrow(HttpException); await expect(context.service.customers('test')).rejects.toThrow(
expect(context.mockLogger.error).toHaveBeenCalledWith('Erro ao buscar clientes', repositoryError); HttpException,
);
expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar clientes',
repositoryError,
);
}); });
}); });
}); });
@@ -268,7 +313,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.getAllProducts(); const result = await context.service.getAllProducts();
result.forEach(product => { result.forEach((product) => {
expect(product.id).toBeDefined(); expect(product.id).toBeDefined();
expect(product.name).toBeDefined(); expect(product.name).toBeDefined();
expect(product.manufacturerCode).toBeDefined(); expect(product.manufacturerCode).toBeDefined();
@@ -281,7 +326,11 @@ describe('DataConsultService', () => {
}); });
it('should validate that repository result is an array', async () => { it('should validate that repository result is an array', async () => {
context.mockRepository.findAllProducts.mockResolvedValue({ id: '001', name: 'Produto 1', manufacturerCode: 'FAB001' } as any); context.mockRepository.findAllProducts.mockResolvedValue({
id: '001',
name: 'Produto 1',
manufacturerCode: 'FAB001',
} as any);
const result = await context.service.getAllProducts(); const result = await context.service.getAllProducts();
expect(Array.isArray(result)).toBe(true); expect(Array.isArray(result)).toBe(true);
}); });
@@ -294,7 +343,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.getAllProducts(); const result = await context.service.getAllProducts();
result.forEach(product => { result.forEach((product) => {
expect(product.id).not.toBe(''); expect(product.id).not.toBe('');
expect(product.name).not.toBe(''); expect(product.name).not.toBe('');
expect(product.manufacturerCode).not.toBe(''); expect(product.manufacturerCode).not.toBe('');
@@ -303,9 +352,16 @@ describe('DataConsultService', () => {
it('should log error when repository throws exception', async () => { it('should log error when repository throws exception', async () => {
const repositoryError = new Error('Database connection failed'); const repositoryError = new Error('Database connection failed');
context.mockRepository.findAllProducts.mockRejectedValue(repositoryError); context.mockRepository.findAllProducts.mockRejectedValue(
await expect(context.service.getAllProducts()).rejects.toThrow(HttpException); repositoryError,
expect(context.mockLogger.error).toHaveBeenCalledWith('Erro ao buscar todos os produtos', repositoryError); );
await expect(context.service.getAllProducts()).rejects.toThrow(
HttpException,
);
expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar todos os produtos',
repositoryError,
);
}); });
}); });
}); });
@@ -325,12 +381,19 @@ describe('DataConsultService', () => {
it('should validate that all carriers have required properties (carrierId, carrierName, carrierDescription)', async () => { it('should validate that all carriers have required properties (carrierId, carrierName, carrierDescription)', async () => {
context.mockRepository.findAllCarriers.mockResolvedValue([ context.mockRepository.findAllCarriers.mockResolvedValue([
{ carrierId: '001', carrierName: 'Transportadora 1' }, { carrierId: '001', carrierName: 'Transportadora 1' },
{ carrierName: 'Transportadora 2', carrierDescription: '002 - Transportadora 2' }, {
{ carrierId: '003', carrierName: 'Transportadora 3', carrierDescription: '003 - Transportadora 3' }, carrierName: 'Transportadora 2',
carrierDescription: '002 - Transportadora 2',
},
{
carrierId: '003',
carrierName: 'Transportadora 3',
carrierDescription: '003 - Transportadora 3',
},
] as any); ] as any);
const result = await context.service.getAllCarriers(); const result = await context.service.getAllCarriers();
result.forEach(carrier => { result.forEach((carrier) => {
expect(carrier.carrierId).toBeDefined(); expect(carrier.carrierId).toBeDefined();
expect(carrier.carrierName).toBeDefined(); expect(carrier.carrierName).toBeDefined();
expect(carrier.carrierDescription).toBeDefined(); expect(carrier.carrierDescription).toBeDefined();
@@ -343,20 +406,36 @@ describe('DataConsultService', () => {
}); });
it('should validate that repository result is an array', async () => { it('should validate that repository result is an array', async () => {
context.mockRepository.findAllCarriers.mockResolvedValue({ carrierId: '001', carrierName: 'Transportadora 1', carrierDescription: '001 - Transportadora 1' } as any); context.mockRepository.findAllCarriers.mockResolvedValue({
carrierId: '001',
carrierName: 'Transportadora 1',
carrierDescription: '001 - Transportadora 1',
} as any);
const result = await context.service.getAllCarriers(); const result = await context.service.getAllCarriers();
expect(Array.isArray(result)).toBe(true); expect(Array.isArray(result)).toBe(true);
}); });
it('should validate that required properties are not empty strings', async () => { it('should validate that required properties are not empty strings', async () => {
context.mockRepository.findAllCarriers.mockResolvedValue([ context.mockRepository.findAllCarriers.mockResolvedValue([
{ carrierId: '', carrierName: 'Transportadora 1', carrierDescription: '001 - Transportadora 1' }, {
{ carrierId: '002', carrierName: '', carrierDescription: '002 - Transportadora 2' }, carrierId: '',
{ carrierId: '003', carrierName: 'Transportadora 3', carrierDescription: '' }, carrierName: 'Transportadora 1',
carrierDescription: '001 - Transportadora 1',
},
{
carrierId: '002',
carrierName: '',
carrierDescription: '002 - Transportadora 2',
},
{
carrierId: '003',
carrierName: 'Transportadora 3',
carrierDescription: '',
},
] as any); ] as any);
const result = await context.service.getAllCarriers(); const result = await context.service.getAllCarriers();
result.forEach(carrier => { result.forEach((carrier) => {
expect(carrier.carrierId).not.toBe(''); expect(carrier.carrierId).not.toBe('');
expect(carrier.carrierName).not.toBe(''); expect(carrier.carrierName).not.toBe('');
expect(carrier.carrierDescription).not.toBe(''); expect(carrier.carrierDescription).not.toBe('');
@@ -365,9 +444,16 @@ describe('DataConsultService', () => {
it('should log error when repository throws exception', async () => { it('should log error when repository throws exception', async () => {
const repositoryError = new Error('Database connection failed'); const repositoryError = new Error('Database connection failed');
context.mockRepository.findAllCarriers.mockRejectedValue(repositoryError); context.mockRepository.findAllCarriers.mockRejectedValue(
await expect(context.service.getAllCarriers()).rejects.toThrow(HttpException); repositoryError,
expect(context.mockLogger.error).toHaveBeenCalledWith('Erro ao buscar transportadoras', repositoryError); );
await expect(context.service.getAllCarriers()).rejects.toThrow(
HttpException,
);
expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar transportadoras',
repositoryError,
);
}); });
}); });
}); });
@@ -392,7 +478,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.getRegions(); const result = await context.service.getRegions();
result.forEach(region => { result.forEach((region) => {
expect(region.numregiao).toBeDefined(); expect(region.numregiao).toBeDefined();
expect(region.regiao).toBeDefined(); expect(region.regiao).toBeDefined();
}); });
@@ -404,7 +490,10 @@ describe('DataConsultService', () => {
}); });
it('should validate that repository result is an array', async () => { it('should validate that repository result is an array', async () => {
context.mockRepository.findRegions.mockResolvedValue({ numregiao: 1, regiao: 'Região Sul' } as any); context.mockRepository.findRegions.mockResolvedValue({
numregiao: 1,
regiao: 'Região Sul',
} as any);
const result = await context.service.getRegions(); const result = await context.service.getRegions();
expect(Array.isArray(result)).toBe(true); expect(Array.isArray(result)).toBe(true);
}); });
@@ -417,7 +506,7 @@ describe('DataConsultService', () => {
] as any); ] as any);
const result = await context.service.getRegions(); const result = await context.service.getRegions();
result.forEach(region => { result.forEach((region) => {
expect(region.numregiao).toBeDefined(); expect(region.numregiao).toBeDefined();
expect(region.numregiao).not.toBeNull(); expect(region.numregiao).not.toBeNull();
expect(region.regiao).toBeDefined(); expect(region.regiao).toBeDefined();
@@ -428,8 +517,133 @@ describe('DataConsultService', () => {
it('should log error when repository throws exception', async () => { it('should log error when repository throws exception', async () => {
const repositoryError = new Error('Database connection failed'); const repositoryError = new Error('Database connection failed');
context.mockRepository.findRegions.mockRejectedValue(repositoryError); context.mockRepository.findRegions.mockRejectedValue(repositoryError);
await expect(context.service.getRegions()).rejects.toThrow(HttpException); await expect(context.service.getRegions()).rejects.toThrow(
expect(context.mockLogger.error).toHaveBeenCalledWith('Erro ao buscar regiões', repositoryError); HttpException,
);
expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar regiões',
repositoryError,
);
});
});
});
describe('products', () => {
let context: Awaited<ReturnType<typeof createDataConsultServiceTestModule>>;
beforeEach(async () => {
context = await createDataConsultServiceTestModule();
});
afterEach(() => {
jest.clearAllMocks();
});
describe('Tests that expose problems', () => {
it('should search products by CODPROD', async () => {
context.mockRepository.findProducts.mockResolvedValue([
{
id: '12345',
name: 'PRODUTO EXEMPLO',
manufacturerCode: 'FAB001',
},
] as any);
const result = await context.service.products('12345');
expect(result).toHaveLength(1);
expect(result[0].id).toBe('12345');
expect(result[0].name).toBe('PRODUTO EXEMPLO');
expect(context.mockRepository.findProducts).toHaveBeenCalledWith(
'12345',
);
});
it('should search products by CODAUXILIAR', async () => {
context.mockRepository.findProducts.mockResolvedValue([
{
id: '12345',
name: 'PRODUTO EXEMPLO',
manufacturerCode: 'FAB001',
},
] as any);
const result = await context.service.products('7891234567890');
expect(result).toHaveLength(1);
expect(result[0].id).toBe('12345');
expect(context.mockRepository.findProducts).toHaveBeenCalledWith(
'7891234567890',
);
});
it('should search products by CODPROD or CODAUXILIAR', async () => {
context.mockRepository.findProducts.mockResolvedValue([
{
id: '12345',
name: 'PRODUTO EXEMPLO',
manufacturerCode: 'FAB001',
},
{
id: '12346',
name: 'OUTRO PRODUTO',
manufacturerCode: 'FAB002',
},
] as any);
const result = await context.service.products('12345');
expect(result).toHaveLength(2);
expect(result[0].id).toBe('12345');
expect(result[1].id).toBe('12346');
});
it('should handle empty result from repository', async () => {
context.mockRepository.findProducts.mockResolvedValue([]);
const result = await context.service.products('99999');
expect(result).toHaveLength(0);
expect(Array.isArray(result)).toBe(true);
});
it('should validate that all products have required properties (id, name)', async () => {
context.mockRepository.findProducts.mockResolvedValue([
{ id: '12345', name: 'PRODUTO 1' },
{ id: '12346', name: 'PRODUTO 2' },
{ id: '12347', name: 'PRODUTO 3' },
] as any);
const result = await context.service.products('12345');
result.forEach((product) => {
expect(product.id).toBeDefined();
expect(product.name).toBeDefined();
});
});
it('should throw error when filter is invalid', async () => {
await expect(
context.service.products(null as any),
).rejects.toThrow(HttpException);
await expect(
context.service.products(undefined as any),
).rejects.toThrow(HttpException);
await expect(
context.service.products('' as any),
).rejects.toThrow(HttpException);
});
it('should log error when repository throws exception', async () => {
const repositoryError = new Error('Database connection failed');
context.mockRepository.findProducts.mockRejectedValue(repositoryError);
await expect(context.service.products('12345')).rejects.toThrow(
HttpException,
);
expect(context.mockLogger.error).toHaveBeenCalledWith(
'Erro ao buscar produtos',
repositoryError,
);
}); });
}); });
}); });

View File

@@ -1,21 +1,16 @@
import {
import { ApiTags, ApiOperation, ApiParam, ApiBearerAuth, ApiResponse } from '@nestjs/swagger'; ApiTags,
} from '@nestjs/swagger';
import { Controller, Get, Param } from '@nestjs/common'; import { Controller, Get, Param } from '@nestjs/common';
import { clientesService } from './clientes.service'; import { clientesService } from './clientes.service';
@ApiTags('clientes') @ApiTags('clientes')
@Controller('api/v1/') @Controller('api/v1/')
export class clientesController { export class clientesController {
constructor(private readonly clientesService: clientesService) {}
constructor(private readonly clientesService: clientesService) {}
@Get('clientes/:filter')
async customer(@Param('filter') filter: string) {
return this.clientesService.customers(filter);
}
@Get('clientes/:filter')
async customer(@Param('filter') filter: string) {
return this.clientesService.customers(filter);
}
} }

View File

@@ -1,5 +1,5 @@
/* eslint-disable prettier/prettier */ /* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
import { clientesService } from './clientes.service'; import { clientesService } from './clientes.service';
import { clientesController } from './clientes.controller'; import { clientesController } from './clientes.controller';

View File

@@ -63,7 +63,9 @@ export class clientesService {
' ( '||REGEXP_REPLACE(PCCLIENT.CGCENT, '[^0-9]', '')||' )' as "name" ' ( '||REGEXP_REPLACE(PCCLIENT.CGCENT, '[^0-9]', '')||' )' as "name"
,PCCLIENT.ESTCOB as "estcob" ,PCCLIENT.ESTCOB as "estcob"
FROM PCCLIENT FROM PCCLIENT
WHERE PCCLIENT.CLIENTE LIKE '${filter.toUpperCase().replace('@', '%')}%' WHERE PCCLIENT.CLIENTE LIKE '${filter
.toUpperCase()
.replace('@', '%')}%'
ORDER BY PCCLIENT.CLIENTE`; ORDER BY PCCLIENT.CLIENTE`;
customers = await queryRunner.manager.query(sql); customers = await queryRunner.manager.query(sql);
} }
@@ -72,7 +74,7 @@ export class clientesService {
} finally { } finally {
await queryRunner.release(); await queryRunner.release();
} }
} },
); );
} }
@@ -103,7 +105,7 @@ export class clientesService {
} finally { } finally {
await queryRunner.release(); await queryRunner.release();
} }
} },
); );
} }
@@ -136,19 +138,13 @@ export class clientesService {
} finally { } finally {
await queryRunner.release(); await queryRunner.release();
} }
} },
); );
} }
/**
* Limpar cache de clientes (útil para invalidação)
* @param pattern - Padrão de chaves para limpar (opcional)
*/
async clearCustomersCache(pattern?: string) { async clearCustomersCache(pattern?: string) {
const cachePattern = pattern || 'clientes:*'; const cachePattern = pattern || 'clientes:*';
// Nota: Esta funcionalidade requer implementação específica do Redis
// Por enquanto, mantemos a interface para futuras implementações
console.log(`Cache de clientes seria limpo para o padrão: ${cachePattern}`);
} }
} }

View File

@@ -1,116 +1,184 @@
import { Controller, Get, Param, Query, UseGuards, UsePipes, ValidationPipe, ParseIntPipe } from '@nestjs/common'; import {
import { ApiTags, ApiOperation, ApiParam, ApiBearerAuth, ApiResponse, ApiQuery } from '@nestjs/swagger'; Controller,
import { DataConsultService } from './data-consult.service'; Get,
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard' Param,
import { ProductDto } from './dto/product.dto'; Query,
import { StoreDto } from './dto/store.dto'; UseGuards,
import { SellerDto } from './dto/seller.dto'; UsePipes,
import { BillingDto } from './dto/billing.dto'; ValidationPipe,
import { CustomerDto } from './dto/customer.dto'; ParseIntPipe,
import { RegionDto } from './dto/region.dto'; } from '@nestjs/common';
import { CarrierDto, FindCarriersDto } from './dto/carrier.dto'; import {
ApiTags,
@ApiTags('DataConsult') ApiOperation,
@Controller('api/v1/data-consult') ApiParam,
export class DataConsultController { ApiBearerAuth,
ApiResponse,
constructor(private readonly dataConsultService: DataConsultService) {} } from '@nestjs/swagger';
import { DataConsultService } from './data-consult.service';
@UseGuards(JwtAuthGuard) import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
@ApiBearerAuth() import { ProductDto } from './dto/product.dto';
@Get('stores') import { StoreDto } from './dto/store.dto';
@ApiOperation({ summary: 'Lista todas as lojas' }) import { SellerDto } from './dto/seller.dto';
@ApiResponse({ status: 200, description: 'Lista de lojas retornada com sucesso', type: [StoreDto] }) import { BillingDto } from './dto/billing.dto';
async stores(): Promise<StoreDto[]> { import { CustomerDto } from './dto/customer.dto';
return this.dataConsultService.stores(); import { RegionDto } from './dto/region.dto';
} import { CarrierDto, FindCarriersDto } from './dto/carrier.dto';
@UseGuards(JwtAuthGuard) @ApiTags('DataConsult')
@ApiBearerAuth() @Controller('api/v1/data-consult')
@Get('sellers') export class DataConsultController {
@ApiOperation({ summary: 'Lista todos os vendedores' }) constructor(private readonly dataConsultService: DataConsultService) {}
@ApiResponse({ status: 200, description: 'Lista de vendedores retornada com sucesso', type: [SellerDto] })
async sellers(): Promise<SellerDto[]> { @UseGuards(JwtAuthGuard)
return this.dataConsultService.sellers(); @ApiBearerAuth()
} @Get('stores')
@ApiOperation({ summary: 'Lista todas as lojas' })
@UseGuards(JwtAuthGuard) @ApiResponse({
@ApiBearerAuth() status: 200,
@Get('billings') description: 'Lista de lojas retornada com sucesso',
@ApiOperation({ summary: 'Retorna informações de faturamento' }) type: [StoreDto],
@ApiResponse({ status: 200, description: 'Informações de faturamento retornadas com sucesso', type: [BillingDto] }) })
async billings(): Promise<BillingDto[]> { async stores(): Promise<StoreDto[]> {
return this.dataConsultService.billings(); return this.dataConsultService.stores();
} }
@UseGuards(JwtAuthGuard) @UseGuards(JwtAuthGuard)
@ApiBearerAuth() @ApiBearerAuth()
@Get('customers/:filter') @Get('sellers')
@ApiOperation({ summary: 'Filtra clientes pelo parâmetro fornecido' }) @ApiOperation({ summary: 'Lista todos os vendedores' })
@ApiParam({ name: 'filter', description: 'Filtro de busca para clientes' }) @ApiResponse({
@ApiResponse({ status: 200, description: 'Lista de clientes filtrados retornada com sucesso', type: [CustomerDto] }) status: 200,
async customer(@Param('filter') filter: string): Promise<CustomerDto[]> { description: 'Lista de vendedores retornada com sucesso',
return this.dataConsultService.customers(filter); type: [SellerDto],
} })
async sellers(): Promise<SellerDto[]> {
@UseGuards(JwtAuthGuard) return this.dataConsultService.sellers();
@ApiBearerAuth() }
@Get('products/:filter')
@ApiOperation({ summary: 'Busca produtos filtrados' }) @UseGuards(JwtAuthGuard)
@ApiParam({ name: 'filter', description: 'Filtro de busca' }) @ApiBearerAuth()
@ApiResponse({ status: 200, description: 'Lista de produtos filtrados retornada com sucesso', type: [ProductDto] }) @Get('billings')
async products(@Param('filter') filter: string): Promise<ProductDto[]> { @ApiOperation({ summary: 'Retorna informações de faturamento' })
return this.dataConsultService.products(filter); @ApiResponse({
} status: 200,
description: 'Informações de faturamento retornadas com sucesso',
type: [BillingDto],
@Get('all') })
@UseGuards(JwtAuthGuard) async billings(): Promise<BillingDto[]> {
@ApiBearerAuth() return this.dataConsultService.billings();
@ApiOperation({ summary: 'VIEW DE 500 PRODUTOS' }) }
@ApiResponse({ status: 200, description: 'Lista de 500 produtos retornada com sucesso', type: [ProductDto] })
async getAllProducts(): Promise<ProductDto[]> { @UseGuards(JwtAuthGuard)
return this.dataConsultService.getAllProducts(); @ApiBearerAuth()
} @Get('customers/:filter')
@ApiOperation({ summary: 'Filtra clientes pelo parâmetro fornecido' })
@Get('carriers/all') @ApiParam({ name: 'filter', description: 'Filtro de busca para clientes' })
@UseGuards(JwtAuthGuard) @ApiResponse({
@ApiBearerAuth() status: 200,
@ApiOperation({ summary: 'Lista todas as transportadoras cadastradas' }) description: 'Lista de clientes filtrados retornada com sucesso',
@ApiResponse({ status: 200, description: 'Lista de transportadoras retornada com sucesso', type: [CarrierDto] }) type: [CustomerDto],
@UsePipes(new ValidationPipe({ transform: true })) })
async getAllCarriers(): Promise<CarrierDto[]> { async customer(@Param('filter') filter: string): Promise<CustomerDto[]> {
return this.dataConsultService.getAllCarriers(); return this.dataConsultService.customers(filter);
} }
@Get('carriers') @UseGuards(JwtAuthGuard)
@UseGuards(JwtAuthGuard) @ApiBearerAuth()
@ApiBearerAuth() @Get('products/codauxiliar/:codauxiliar')
@ApiOperation({ summary: 'Busca transportadoras por período de data' }) @ApiOperation({ summary: 'Busca produtos por código auxiliar (EAN)' })
@ApiResponse({ status: 200, description: 'Lista de transportadoras por período retornada com sucesso', type: [CarrierDto] }) @ApiParam({ name: 'codauxiliar', description: 'Código auxiliar (EAN) do produto' })
@UsePipes(new ValidationPipe({ transform: true })) @ApiResponse({
async getCarriersByDate(@Query() query: FindCarriersDto): Promise<CarrierDto[]> { status: 200,
return this.dataConsultService.getCarriersByDate(query); description: 'Lista de produtos encontrados por código auxiliar',
} type: [ProductDto],
})
@Get('carriers/order/:orderId') async productsByCodauxiliar(@Param('codauxiliar') codauxiliar: string): Promise<ProductDto[]> {
@UseGuards(JwtAuthGuard) return this.dataConsultService.productsByCodauxiliar(codauxiliar);
@ApiBearerAuth() }
@ApiOperation({ summary: 'Busca transportadoras de um pedido específico' })
@ApiParam({ name: 'orderId', example: 236001388 }) @UseGuards(JwtAuthGuard)
@ApiResponse({ status: 200, description: 'Lista de transportadoras do pedido retornada com sucesso', type: [CarrierDto] }) @ApiBearerAuth()
@UsePipes(new ValidationPipe({ transform: true })) @Get('products/:filter')
async getOrderCarriers(@Param('orderId', ParseIntPipe) orderId: number): Promise<CarrierDto[]> { @ApiOperation({ summary: 'Busca produtos filtrados' })
return this.dataConsultService.getOrderCarriers(orderId); @ApiParam({ name: 'filter', description: 'Filtro de busca' })
} @ApiResponse({
status: 200,
@Get('regions') description: 'Lista de produtos filtrados retornada com sucesso',
//@UseGuards(JwtAuthGuard) type: [ProductDto],
//@ApiBearerAuth() })
@ApiOperation({ summary: 'Lista todas as regiões cadastradas' }) async products(@Param('filter') filter: string): Promise<ProductDto[]> {
@ApiResponse({ status: 200, description: 'Lista de regiões retornada com sucesso', type: [RegionDto] }) return this.dataConsultService.products(filter);
async getRegions(): Promise<RegionDto[]> { }
return this.dataConsultService.getRegions();
} @UseGuards(JwtAuthGuard)
@ApiBearerAuth()
} @Get('all')
// GET handler: returns a fixed-size product listing (the repository query caps
// the result at 500 rows via ROWNUM <= 500).
@ApiOperation({ summary: 'Lista 500 produtos' })
@ApiResponse({
status: 200,
description: 'Lista de 500 produtos retornada com sucesso',
type: [ProductDto],
})
// Thin delegation — all filtering/validation/caching happens in the service layer.
async getAllProducts(): Promise<ProductDto[]> {
return this.dataConsultService.getAllProducts();
}
// GET /carriers/all — lists every registered carrier.
// Requires a valid JWT (JwtAuthGuard); advertised as bearer-auth in Swagger.
@UseGuards(JwtAuthGuard)
@ApiBearerAuth()
@Get('carriers/all')
@ApiOperation({ summary: 'Lista todas as transportadoras cadastradas' })
@ApiResponse({
status: 200,
description: 'Lista de transportadoras retornada com sucesso',
type: [CarrierDto],
})
// NOTE(review): this ValidationPipe has nothing to validate — the handler takes
// no decorated parameters; presumably copied from the sibling endpoints. Confirm
// and consider removing the decorator.
@UsePipes(new ValidationPipe({ transform: true }))
async getAllCarriers(): Promise<CarrierDto[]> {
return this.dataConsultService.getAllCarriers();
}
// GET /carriers — searches carriers by a date range supplied as query params.
// Requires a valid JWT (JwtAuthGuard); advertised as bearer-auth in Swagger.
@UseGuards(JwtAuthGuard)
@ApiBearerAuth()
@Get('carriers')
@ApiOperation({ summary: 'Busca transportadoras por período de data' })
@ApiResponse({
status: 200,
description: 'Lista de transportadoras por período retornada com sucesso',
type: [CarrierDto],
})
// transform: true makes the raw query-string object an actual FindCarriersDto
// instance (so class-validator/class-transformer rules on the DTO apply).
@UsePipes(new ValidationPipe({ transform: true }))
async getCarriersByDate(
@Query() query: FindCarriersDto,
): Promise<CarrierDto[]> {
return this.dataConsultService.getCarriersByDate(query);
}
// GET /carriers/order/:orderId — carriers attached to one specific order.
// Requires a valid JWT (JwtAuthGuard); advertised as bearer-auth in Swagger.
@UseGuards(JwtAuthGuard)
@ApiBearerAuth()
@Get('carriers/order/:orderId')
@ApiOperation({ summary: 'Busca transportadoras de um pedido específico' })
@ApiParam({ name: 'orderId', example: 236001388 })
@ApiResponse({
status: 200,
description: 'Lista de transportadoras do pedido retornada com sucesso',
type: [CarrierDto],
})
// NOTE(review): ParseIntPipe already coerces/validates the single path param
// (non-numeric input -> 400), so this ValidationPipe appears redundant here —
// confirm before removing.
@UsePipes(new ValidationPipe({ transform: true }))
async getOrderCarriers(
@Param('orderId', ParseIntPipe) orderId: number,
): Promise<CarrierDto[]> {
return this.dataConsultService.getOrderCarriers(orderId);
}
// GET /regions — lists every registered region.
// NOTE(review): unlike the carrier endpoints, no JwtAuthGuard here, so this
// route is public. A commented-out guard on this handler is visible elsewhere
// in the change history, which suggests this is intentional — confirm.
@Get('regions')
@ApiOperation({ summary: 'Lista todas as regiões cadastradas' })
@ApiResponse({
status: 200,
description: 'Lista de regiões retornada com sucesso',
type: [RegionDto],
})
async getRegions(): Promise<RegionDto[]> {
return this.dataConsultService.getRegions();
}
}

View File

@@ -1,19 +1,14 @@
import { Module } from '@nestjs/common'; import { Module } from '@nestjs/common';
import { DataConsultService } from './data-consult.service'; import { DataConsultService } from './data-consult.service';
import { DataConsultController } from './data-consult.controller'; import { DataConsultController } from './data-consult.controller';
import { DataConsultRepository } from './data-consult.repository'; import { DataConsultRepository } from './data-consult.repository';
import { LoggerModule } from 'src/Log/logger.module'; import { ConfigModule } from '@nestjs/config';
import { ConfigModule } from '@nestjs/config'; import { RedisModule } from 'src/core/configs/cache/redis.module';
import { RedisModule } from 'src/core/configs/cache/redis.module'; import { clientes } from './clientes.module';
import { clientes } from './clientes.module';
@Module({
@Module({ imports: [ConfigModule, RedisModule, clientes],
imports: [LoggerModule, ConfigModule, RedisModule, clientes], controllers: [DataConsultController],
controllers: [DataConsultController], providers: [DataConsultService, DataConsultRepository],
providers: [ })
DataConsultService, export class DataConsultModule {}
DataConsultRepository,
],
})
export class DataConsultModule {}

View File

@@ -1,6 +1,5 @@
import { Injectable, Inject } from '@nestjs/common'; import { Injectable, Inject } from '@nestjs/common';
import { DataSource } from 'typeorm'; import { DataSource } from 'typeorm';
import { createOracleConfig } from '../core/configs/typeorm.oracle.config';
import { StoreDto } from './dto/store.dto'; import { StoreDto } from './dto/store.dto';
import { SellerDto } from './dto/seller.dto'; import { SellerDto } from './dto/seller.dto';
import { BillingDto } from './dto/billing.dto'; import { BillingDto } from './dto/billing.dto';
@@ -14,7 +13,7 @@ import { DATA_SOURCE } from '../core/constants';
export class DataConsultRepository { export class DataConsultRepository {
constructor( constructor(
@Inject(DATA_SOURCE) private readonly dataSource: DataSource, @Inject(DATA_SOURCE) private readonly dataSource: DataSource,
private readonly configService: ConfigService private readonly configService: ConfigService,
) {} ) {}
private async executeQuery<T>(sql: string, params: any[] = []): Promise<T> { private async executeQuery<T>(sql: string, params: any[] = []): Promise<T> {
@@ -38,10 +37,10 @@ export class DataConsultRepository {
ORDER BY TO_NUMBER(PCFILIAL.CODIGO) ORDER BY TO_NUMBER(PCFILIAL.CODIGO)
`; `;
const results = await this.executeQuery<StoreDto[]>(sql); const results = await this.executeQuery<StoreDto[]>(sql);
return results.map(result => new StoreDto(result)); return results.map((result) => new StoreDto(result));
} }
async findSellers(): Promise<SellerDto[]> { async findSellers(): Promise<SellerDto[]> {
const sql = ` const sql = `
SELECT PCUSUARI.CODUSUR as "id", SELECT PCUSUARI.CODUSUR as "id",
PCUSUARI.NOME as "name" PCUSUARI.NOME as "name"
@@ -51,27 +50,28 @@ async findSellers(): Promise<SellerDto[]> {
AND (PCUSUARI.BLOQUEIO IS NULL OR PCUSUARI.BLOQUEIO = 'N') AND (PCUSUARI.BLOQUEIO IS NULL OR PCUSUARI.BLOQUEIO = 'N')
`; `;
const results = await this.executeQuery<SellerDto[]>(sql); const results = await this.executeQuery<SellerDto[]>(sql);
return results.map(result => new SellerDto(result)); return results.map((result) => new SellerDto(result));
} }
async findBillings(): Promise<BillingDto[]> { async findBillings(): Promise<BillingDto[]> {
const sql = ` const sql = `
SELECT p.CODCOB, p.COBRANCA FROM PCCOB p SELECT p.CODCOB as "id",
SYSDATE as "date",
0 as "total",
p.COBRANCA as "description"
FROM PCCOB p
`; `;
const results = await this.executeQuery<BillingDto[]>(sql); const results = await this.executeQuery<BillingDto[]>(sql);
return results.map(result => new BillingDto(result)); return results.map((result) => new BillingDto(result));
} }
async findCustomers(filter: string): Promise<CustomerDto[]> { async findCustomers(filter: string): Promise<CustomerDto[]> {
// 1) limpa todos os não-dígitos para buscas exatas
const cleanedDigits = filter.replace(/\D/g, ''); const cleanedDigits = filter.replace(/\D/g, '');
// 2) prepara filtro para busca por nome (LIKE)
const likeFilter = `%${filter.toUpperCase().replace(/@/g, '%')}%`; const likeFilter = `%${filter.toUpperCase().replace(/@/g, '%')}%`;
let customers: CustomerDto[] = []; let customers: CustomerDto[] = [];
// --- 1ª tentativa: busca por código do cliente (CODCLI) ---
let sql = ` let sql = `
SELECT SELECT
PCCLIENT.CODCLI AS "id", PCCLIENT.CODCLI AS "id",
@@ -84,7 +84,6 @@ async findSellers(): Promise<SellerDto[]> {
`; `;
customers = await this.executeQuery<CustomerDto[]>(sql, [cleanedDigits]); customers = await this.executeQuery<CustomerDto[]>(sql, [cleanedDigits]);
// --- 2ª tentativa: busca por CPF/CNPJ (CGCENT) ---
if (customers.length === 0) { if (customers.length === 0) {
sql = ` sql = `
SELECT SELECT
@@ -99,7 +98,6 @@ async findSellers(): Promise<SellerDto[]> {
customers = await this.executeQuery<CustomerDto[]>(sql, [cleanedDigits]); customers = await this.executeQuery<CustomerDto[]>(sql, [cleanedDigits]);
} }
// --- 3ª tentativa: busca parcial por nome ---
if (customers.length === 0) { if (customers.length === 0) {
sql = ` sql = `
SELECT SELECT
@@ -114,18 +112,31 @@ async findSellers(): Promise<SellerDto[]> {
customers = await this.executeQuery<CustomerDto[]>(sql, [likeFilter]); customers = await this.executeQuery<CustomerDto[]>(sql, [likeFilter]);
} }
return customers.map(row => new CustomerDto(row)); return customers.map((row) => new CustomerDto(row));
} }
async findProducts(filter: string): Promise<ProductDto[]> { async findProducts(filter: string): Promise<ProductDto[]> {
const cleanedFilter = filter.replace(/\D/g, '');
const sql = ` const sql = `
SELECT PCPRODUT.CODPROD as "id", SELECT PCPRODUT.CODPROD as "id",
PCPRODUT.CODPROD || ' - ' || PCPRODUT.DESCRICAO || ' ( ' || PCPRODUT.CODFAB || ' )' as "description" PCPRODUT.CODPROD || ' - ' || PCPRODUT.DESCRICAO || ' ( ' || PCPRODUT.CODFAB || ' )' as "description"
FROM PCPRODUT FROM PCPRODUT
WHERE PCPRODUT.CODPROD = :filter WHERE PCPRODUT.CODPROD = :0
OR REGEXP_REPLACE(PCPRODUT.CODAUXILIAR, '[^0-9]', '') = :1
`; `;
const results = await this.executeQuery<ProductDto[]>(sql, [filter]); const results = await this.executeQuery<ProductDto[]>(sql, [filter, cleanedFilter]);
return results.map(result => new ProductDto(result)); return results.map((result) => new ProductDto(result));
}
async findProductsByCodauxiliar(codauxiliar: string): Promise<ProductDto[]> {
const sql = `
SELECT PCPRODUT.CODPROD as "id",
PCPRODUT.CODPROD || ' - ' || PCPRODUT.DESCRICAO || ' ( ' || PCPRODUT.CODFAB || ' )' as "description"
FROM PCPRODUT
WHERE REGEXP_REPLACE(PCPRODUT.CODAUXILIAR, '[^0-9]', '') = REGEXP_REPLACE(:codauxiliar, '[^0-9]', '')
`;
const results = await this.dataSource.query(sql, [codauxiliar]);
return results.map((result) => new ProductDto(result));
} }
async findAllProducts(): Promise<ProductDto[]> { async findAllProducts(): Promise<ProductDto[]> {
@@ -136,12 +147,9 @@ async findSellers(): Promise<SellerDto[]> {
WHERE ROWNUM <= 500 WHERE ROWNUM <= 500
`; `;
const results = await this.executeQuery<ProductDto[]>(sql); const results = await this.executeQuery<ProductDto[]>(sql);
return results.map(result => new ProductDto(result)); return results.map((result) => new ProductDto(result));
} }
/**
* Busca todas as transportadoras cadastradas no sistema
*/
async findAllCarriers(): Promise<any[]> { async findAllCarriers(): Promise<any[]> {
const sql = ` const sql = `
SELECT DISTINCT SELECT DISTINCT
@@ -157,9 +165,6 @@ async findSellers(): Promise<SellerDto[]> {
return await this.executeQuery<any[]>(sql); return await this.executeQuery<any[]>(sql);
} }
/**
* Busca as transportadoras por período de data
*/
async findCarriersByDate(query: any): Promise<any[]> { async findCarriersByDate(query: any): Promise<any[]> {
let sql = ` let sql = `
SELECT DISTINCT SELECT DISTINCT
@@ -178,12 +183,16 @@ async findSellers(): Promise<SellerDto[]> {
let paramIndex = 0; let paramIndex = 0;
if (query.dateIni) { if (query.dateIni) {
conditions.push(`AND PCPEDC.DATA >= TO_DATE(:${paramIndex}, 'YYYY-MM-DD')`); conditions.push(
`AND PCPEDC.DATA >= TO_DATE(:${paramIndex}, 'YYYY-MM-DD')`,
);
parameters.push(query.dateIni); parameters.push(query.dateIni);
paramIndex++; paramIndex++;
} }
if (query.dateEnd) { if (query.dateEnd) {
conditions.push(`AND PCPEDC.DATA <= TO_DATE(:${paramIndex}, 'YYYY-MM-DD')`); conditions.push(
`AND PCPEDC.DATA <= TO_DATE(:${paramIndex}, 'YYYY-MM-DD')`,
);
parameters.push(query.dateEnd); parameters.push(query.dateEnd);
paramIndex++; paramIndex++;
} }
@@ -193,16 +202,13 @@ async findSellers(): Promise<SellerDto[]> {
paramIndex++; paramIndex++;
} }
sql += "\n" + conditions.join("\n"); sql += '\n' + conditions.join('\n');
sql += "\nGROUP BY PCPEDC.CODFORNECFRETE, PCFORNEC.FORNECEDOR"; sql += '\nGROUP BY PCPEDC.CODFORNECFRETE, PCFORNEC.FORNECEDOR';
sql += "\nORDER BY PCPEDC.CODFORNECFRETE"; sql += '\nORDER BY PCPEDC.CODFORNECFRETE';
return await this.executeQuery<any[]>(sql, parameters); return await this.executeQuery<any[]>(sql, parameters);
} }
/**
* Busca as transportadoras de um pedido específico
*/
async findOrderCarriers(orderId: number): Promise<any[]> { async findOrderCarriers(orderId: number): Promise<any[]> {
const sql = ` const sql = `
SELECT DISTINCT SELECT DISTINCT
@@ -219,9 +225,6 @@ async findSellers(): Promise<SellerDto[]> {
return await this.executeQuery<any[]>(sql, [orderId]); return await this.executeQuery<any[]>(sql, [orderId]);
} }
/**
* Busca todas as regiões cadastradas
*/
async findRegions(): Promise<RegionDto[]> { async findRegions(): Promise<RegionDto[]> {
const sql = ` const sql = `
SELECT SELECT
@@ -231,6 +234,6 @@ async findSellers(): Promise<SellerDto[]> {
ORDER BY PCREGIAO.NUMREGIAO ORDER BY PCREGIAO.NUMREGIAO
`; `;
const results = await this.executeQuery<RegionDto[]>(sql); const results = await this.executeQuery<RegionDto[]>(sql);
return results.map(result => new RegionDto(result)); return results.map((result) => new RegionDto(result));
} }
} }

View File

@@ -1,331 +1,452 @@
import { Injectable, HttpException, HttpStatus, Inject } from '@nestjs/common'; import { Injectable, HttpException, HttpStatus, Inject, Logger } from '@nestjs/common';
import { DataConsultRepository } from './data-consult.repository'; import { DataConsultRepository } from './data-consult.repository';
import { StoreDto } from './dto/store.dto'; import { StoreDto } from './dto/store.dto';
import { SellerDto } from './dto/seller.dto'; import { SellerDto } from './dto/seller.dto';
import { BillingDto } from './dto/billing.dto'; import { BillingDto } from './dto/billing.dto';
import { CustomerDto } from './dto/customer.dto'; import { CustomerDto } from './dto/customer.dto';
import { ProductDto } from './dto/product.dto'; import { ProductDto } from './dto/product.dto';
import { RegionDto } from './dto/region.dto'; import { RegionDto } from './dto/region.dto';
import { CarrierDto, FindCarriersDto } from './dto/carrier.dto'; import { CarrierDto, FindCarriersDto } from './dto/carrier.dto';
import { ILogger } from '../Log/ILogger'; import { RedisClientToken } from '../core/configs/cache/redis-client.adapter.provider';
import { RedisClientToken } from '../core/configs/cache/redis-client.adapter.provider'; import { IRedisClient } from '../core/configs/cache/IRedisClient';
import { IRedisClient } from '../core/configs/cache/IRedisClient'; import { getOrSetCache } from '../shared/cache.util';
import { getOrSetCache } from '../shared/cache.util'; import { DataSource } from 'typeorm';
import { DataSource } from 'typeorm'; import { DATA_SOURCE } from '../core/constants';
import { DATA_SOURCE } from '../core/constants';
@Injectable()
@Injectable() export class DataConsultService {
export class DataConsultService { private readonly logger = new Logger(DataConsultService.name);
private readonly SELLERS_CACHE_KEY = 'data-consult:sellers'; private readonly SELLERS_CACHE_KEY = 'data-consult:sellers';
private readonly SELLERS_TTL = 3600; private readonly SELLERS_TTL = 3600;
private readonly STORES_TTL = 3600; private readonly STORES_TTL = 3600;
private readonly BILLINGS_TTL = 3600; private readonly BILLINGS_TTL = 3600;
private readonly ALL_PRODUCTS_CACHE_KEY = 'data-consult:products:all'; private readonly ALL_PRODUCTS_CACHE_KEY = 'data-consult:products:all';
private readonly ALL_PRODUCTS_TTL = 600; private readonly ALL_PRODUCTS_TTL = 600;
private readonly CUSTOMERS_TTL = 3600; private readonly CUSTOMERS_TTL = 3600;
private readonly CARRIERS_CACHE_KEY = 'data-consult:carriers:all'; private readonly CARRIERS_CACHE_KEY = 'data-consult:carriers:all';
private readonly CARRIERS_TTL = 3600; private readonly CARRIERS_TTL = 3600;
private readonly REGIONS_CACHE_KEY = 'data-consult:regions'; private readonly REGIONS_CACHE_KEY = 'data-consult:regions';
private readonly REGIONS_TTL = 7200; private readonly REGIONS_TTL = 7200;
constructor( constructor(
private readonly repository: DataConsultRepository, private readonly repository: DataConsultRepository,
@Inject(RedisClientToken) private readonly redisClient: IRedisClient, @Inject(RedisClientToken) private readonly redisClient: IRedisClient,
@Inject('LoggerService') private readonly logger: ILogger, @Inject(DATA_SOURCE) private readonly dataSource: DataSource,
@Inject(DATA_SOURCE) private readonly dataSource: DataSource ) {}
) {}
async stores(): Promise<StoreDto[]> {
async stores(): Promise<StoreDto[]> { this.logger.log('Buscando todas as lojas');
this.logger.log('Buscando todas as lojas'); try {
try { const stores = await this.repository.findStores();
const stores = await this.repository.findStores();
if (stores === null || stores === undefined) {
if (stores === null || stores === undefined) { throw new HttpException(
throw new HttpException('Resultado inválido do repositório', HttpStatus.INTERNAL_SERVER_ERROR); 'Resultado inválido do repositório',
} HttpStatus.INTERNAL_SERVER_ERROR,
);
const storesArray = Array.isArray(stores) ? stores : [stores]; }
return storesArray const storesArray = Array.isArray(stores) ? stores : [stores];
.filter(store => {
if (!store || typeof store !== 'object') { return storesArray
return false; .filter((store) => {
} if (!store || typeof store !== 'object') {
const hasId = store.id !== undefined && store.id !== null && store.id !== ''; return false;
const hasName = store.name !== undefined && store.name !== null && store.name !== ''; }
const hasStore = store.store !== undefined && store.store !== null && store.store !== ''; const hasId =
return hasId && hasName && hasStore; store.id !== undefined && store.id !== null && store.id !== '';
}) const hasName =
.map(store => new StoreDto(store)); store.name !== undefined &&
} catch (error) { store.name !== null &&
this.logger.error('Erro ao buscar lojas', error); store.name !== '';
throw new HttpException('Erro ao buscar lojas', HttpStatus.INTERNAL_SERVER_ERROR); const hasStore =
} store.store !== undefined &&
} store.store !== null &&
store.store !== '';
async sellers(): Promise<SellerDto[]> { return hasId && hasName && hasStore;
this.logger.log('Buscando vendedores com cache Redis...'); })
try { .map((store) => new StoreDto(store));
return await getOrSetCache<SellerDto[]>( } catch (error) {
this.redisClient, this.logger.error('Erro ao buscar lojas', error);
this.SELLERS_CACHE_KEY, throw new HttpException(
this.SELLERS_TTL, 'Erro ao buscar lojas',
async () => { HttpStatus.INTERNAL_SERVER_ERROR,
try { );
const sellers = await this.repository.findSellers(); }
}
if (sellers === null || sellers === undefined) {
throw new HttpException('Resultado inválido do repositório', HttpStatus.INTERNAL_SERVER_ERROR); async sellers(): Promise<SellerDto[]> {
} this.logger.log('Buscando vendedores com cache Redis...');
try {
const sellersArray = Array.isArray(sellers) ? sellers : [sellers]; return await getOrSetCache<SellerDto[]>(
this.redisClient,
return sellersArray this.SELLERS_CACHE_KEY,
.filter(seller => { this.SELLERS_TTL,
if (!seller || typeof seller !== 'object') { async () => {
return false; try {
} const sellers = await this.repository.findSellers();
const hasId = seller.id !== undefined && seller.id !== null && seller.id !== '';
const hasName = seller.name !== undefined && seller.name !== null && seller.name !== ''; if (sellers === null || sellers === undefined) {
return hasId && hasName; throw new HttpException(
}) 'Resultado inválido do repositório',
.map(seller => new SellerDto(seller)); HttpStatus.INTERNAL_SERVER_ERROR,
} catch (error) { );
this.logger.error('Erro ao buscar vendedores', error); }
throw error;
} const sellersArray = Array.isArray(sellers) ? sellers : [sellers];
}
); return sellersArray
} catch (error) { .filter((seller) => {
this.logger.error('Erro ao buscar vendedores', error); if (!seller || typeof seller !== 'object') {
throw new HttpException('Erro ao buscar vendedores', HttpStatus.INTERNAL_SERVER_ERROR); return false;
} }
} const hasId =
seller.id !== undefined &&
async billings(): Promise<BillingDto[]> { seller.id !== null &&
this.logger.log('Buscando informações de faturamento'); seller.id !== '';
try { const hasName =
const billings = await this.repository.findBillings(); seller.name !== undefined &&
seller.name !== null &&
if (billings === null || billings === undefined) { seller.name !== '';
throw new HttpException('Resultado inválido do repositório', HttpStatus.INTERNAL_SERVER_ERROR); return hasId && hasName;
} })
.map((seller) => new SellerDto(seller));
const billingsArray = Array.isArray(billings) ? billings : [billings]; } catch (error) {
this.logger.error('Erro ao buscar vendedores', error);
return billingsArray throw error;
.filter(billing => { }
if (!billing || typeof billing !== 'object') { },
return false; );
} } catch (error) {
const hasId = billing.id !== undefined && billing.id !== null && billing.id !== ''; this.logger.error('Erro ao buscar vendedores', error);
const hasDate = billing.date !== undefined && billing.date !== null; throw new HttpException(
const hasTotal = billing.total !== undefined && billing.total !== null; 'Erro ao buscar vendedores',
return hasId && hasDate && hasTotal; HttpStatus.INTERNAL_SERVER_ERROR,
}) );
.map(billing => new BillingDto(billing)); }
} catch (error) { }
this.logger.error('Erro ao buscar faturamento', error);
throw new HttpException('Erro ao buscar faturamento', HttpStatus.INTERNAL_SERVER_ERROR); async billings(): Promise<BillingDto[]> {
} this.logger.log('Buscando informações de faturamento');
} try {
const billings = await this.repository.findBillings();
async customers(filter: string): Promise<CustomerDto[]> {
this.logger.log(`Buscando clientes com filtro: ${filter}`); if (billings === null || billings === undefined) {
try { throw new HttpException(
if (!filter || typeof filter !== 'string') { 'Resultado inválido do repositório',
throw new HttpException('Filtro inválido', HttpStatus.BAD_REQUEST); HttpStatus.INTERNAL_SERVER_ERROR,
} );
const customers = await this.repository.findCustomers(filter); }
if (customers === null || customers === undefined) { const billingsArray = Array.isArray(billings) ? billings : [billings];
throw new HttpException('Resultado inválido do repositório', HttpStatus.INTERNAL_SERVER_ERROR);
} return billingsArray
.filter((billing) => {
const customersArray = Array.isArray(customers) ? customers : [customers]; if (!billing || typeof billing !== 'object') {
return false;
return customersArray }
.filter(customer => { const hasId =
if (!customer || typeof customer !== 'object') { billing.id !== undefined &&
return false; billing.id !== null &&
} billing.id !== '';
const hasId = customer.id !== undefined && customer.id !== null && customer.id !== ''; const hasDate = billing.date !== undefined && billing.date !== null;
const hasName = customer.name !== undefined && customer.name !== null && customer.name !== ''; const hasTotal =
const hasDocument = customer.document !== undefined && customer.document !== null && customer.document !== ''; billing.total !== undefined && billing.total !== null;
return hasId && hasName && hasDocument; return hasId && hasDate && hasTotal;
}) })
.map(customer => new CustomerDto(customer)); .map((billing) => new BillingDto(billing));
} catch (error) { } catch (error) {
this.logger.error('Erro ao buscar clientes', error); this.logger.error('Erro ao buscar faturamento', error);
throw new HttpException('Erro ao buscar clientes', HttpStatus.INTERNAL_SERVER_ERROR); throw new HttpException(
} 'Erro ao buscar faturamento',
} HttpStatus.INTERNAL_SERVER_ERROR,
);
async products(filter: string): Promise<ProductDto[]> { }
this.logger.log(`Buscando produtos com filtro: ${filter}`); }
try {
if (!filter || typeof filter !== 'string') { async customers(filter: string): Promise<CustomerDto[]> {
throw new HttpException('Filtro inválido', HttpStatus.BAD_REQUEST); this.logger.log(`Buscando clientes com filtro: ${filter}`);
} try {
const products = await this.repository.findProducts(filter); if (!filter || typeof filter !== 'string') {
return products.map(product => new ProductDto(product)); throw new HttpException('Filtro inválido', HttpStatus.BAD_REQUEST);
} catch (error) { }
this.logger.error('Erro ao buscar produtos', error); const customers = await this.repository.findCustomers(filter);
throw new HttpException('Erro ao buscar produtos', HttpStatus.INTERNAL_SERVER_ERROR);
} if (customers === null || customers === undefined) {
} throw new HttpException(
'Resultado inválido do repositório',
async getAllProducts(): Promise<ProductDto[]> { HttpStatus.INTERNAL_SERVER_ERROR,
this.logger.log('Buscando todos os produtos'); );
try { }
return await getOrSetCache<ProductDto[]>(
this.redisClient, const customersArray = Array.isArray(customers) ? customers : [customers];
this.ALL_PRODUCTS_CACHE_KEY,
this.ALL_PRODUCTS_TTL, return customersArray
async () => { .filter((customer) => {
try { if (!customer || typeof customer !== 'object') {
const products = await this.repository.findAllProducts(); return false;
}
if (products === null || products === undefined) { const hasId =
throw new HttpException('Resultado inválido do repositório', HttpStatus.INTERNAL_SERVER_ERROR); customer.id !== undefined &&
} customer.id !== null &&
customer.id !== '';
const productsArray = Array.isArray(products) ? products : [products]; const hasName =
customer.name !== undefined &&
return productsArray customer.name !== null &&
.filter(product => { customer.name !== '';
if (!product || typeof product !== 'object') { const hasDocument =
return false; customer.document !== undefined &&
} customer.document !== null &&
const hasId = product.id !== undefined && product.id !== null && product.id !== ''; customer.document !== '';
const hasName = product.name !== undefined && product.name !== null && product.name !== ''; return hasId && hasName && hasDocument;
const hasManufacturerCode = product.manufacturerCode !== undefined && product.manufacturerCode !== null && product.manufacturerCode !== ''; })
return hasId && hasName && hasManufacturerCode; .map((customer) => new CustomerDto(customer));
}) } catch (error) {
.map(product => new ProductDto(product)); this.logger.error('Erro ao buscar clientes', error);
} catch (error) { throw new HttpException(
this.logger.error('Erro ao buscar todos os produtos', error); 'Erro ao buscar clientes',
throw error; HttpStatus.INTERNAL_SERVER_ERROR,
} );
} }
); }
} catch (error) {
this.logger.error('Erro ao buscar todos os produtos', error); async products(filter: string): Promise<ProductDto[]> {
throw new HttpException('Erro ao buscar produtos', HttpStatus.INTERNAL_SERVER_ERROR); this.logger.log(`Buscando produtos com filtro: ${filter}`);
} try {
} if (!filter || typeof filter !== 'string') {
throw new HttpException('Filtro inválido', HttpStatus.BAD_REQUEST);
async getAllCarriers(): Promise<CarrierDto[]> { }
this.logger.log('Buscando todas as transportadoras'); const products = await this.repository.findProducts(filter);
try { return products.map((product) => new ProductDto(product));
return await getOrSetCache<CarrierDto[]>( } catch (error) {
this.redisClient, this.logger.error('Erro ao buscar produtos', error);
this.CARRIERS_CACHE_KEY, throw new HttpException(
this.CARRIERS_TTL, 'Erro ao buscar produtos',
async () => { HttpStatus.INTERNAL_SERVER_ERROR,
try { );
const carriers = await this.repository.findAllCarriers(); }
}
if (carriers === null || carriers === undefined) {
throw new HttpException('Resultado inválido do repositório', HttpStatus.INTERNAL_SERVER_ERROR); async productsByCodauxiliar(codauxiliar: string): Promise<ProductDto[]> {
} this.logger.log(`Buscando produtos por codauxiliar: ${codauxiliar}`);
try {
const carriersArray = Array.isArray(carriers) ? carriers : [carriers]; if (!codauxiliar || typeof codauxiliar !== 'string') {
throw new HttpException('Código auxiliar inválido', HttpStatus.BAD_REQUEST);
return carriersArray }
.filter(carrier => { const products = await this.repository.findProductsByCodauxiliar(codauxiliar);
if (!carrier || typeof carrier !== 'object') { return products.map((product) => new ProductDto(product));
return false; } catch (error) {
} this.logger.error('Erro ao buscar produtos por codauxiliar', error);
const hasCarrierId = carrier.carrierId !== undefined && carrier.carrierId !== null && carrier.carrierId !== ''; throw new HttpException(
const hasCarrierName = carrier.carrierName !== undefined && carrier.carrierName !== null && carrier.carrierName !== ''; 'Erro ao buscar produtos por codauxiliar',
const hasCarrierDescription = carrier.carrierDescription !== undefined && carrier.carrierDescription !== null && carrier.carrierDescription !== ''; HttpStatus.INTERNAL_SERVER_ERROR,
return hasCarrierId && hasCarrierName && hasCarrierDescription; );
}) }
.map(carrier => ({ }
carrierId: carrier.carrierId?.toString() || '',
carrierName: carrier.carrierName || '', async getAllProducts(): Promise<ProductDto[]> {
carrierDescription: carrier.carrierDescription || '', this.logger.log('Buscando todos os produtos');
})); try {
} catch (error) { return await getOrSetCache<ProductDto[]>(
this.logger.error('Erro ao buscar transportadoras', error); this.redisClient,
throw error; this.ALL_PRODUCTS_CACHE_KEY,
} this.ALL_PRODUCTS_TTL,
} async () => {
); try {
} catch (error) { const products = await this.repository.findAllProducts();
this.logger.error('Erro ao buscar transportadoras', error);
throw new HttpException('Erro ao buscar transportadoras', HttpStatus.INTERNAL_SERVER_ERROR); if (products === null || products === undefined) {
} throw new HttpException(
} 'Resultado inválido do repositório',
HttpStatus.INTERNAL_SERVER_ERROR,
async getCarriersByDate(query: FindCarriersDto): Promise<CarrierDto[]> { );
this.logger.log(`Buscando transportadoras por período: ${JSON.stringify(query)}`); }
try {
const carriers = await this.repository.findCarriersByDate(query); const productsArray = Array.isArray(products)
return carriers.map(carrier => ({ ? products
carrierId: carrier.carrierId?.toString() || '', : [products];
carrierName: carrier.carrierName || '',
carrierDescription: carrier.carrierDescription || '', return productsArray
ordersCount: carrier.ordersCount ? Number(carrier.ordersCount) : 0, .filter((product) => {
})); if (!product || typeof product !== 'object') {
} catch (error) { return false;
this.logger.error('Erro ao buscar transportadoras por período', error); }
throw new HttpException('Erro ao buscar transportadoras', HttpStatus.INTERNAL_SERVER_ERROR); const hasId =
} product.id !== undefined &&
} product.id !== null &&
product.id !== '';
async getOrderCarriers(orderId: number): Promise<CarrierDto[]> { const hasName =
this.logger.log(`Buscando transportadoras do pedido: ${orderId}`); product.name !== undefined &&
try { product.name !== null &&
const carriers = await this.repository.findOrderCarriers(orderId); product.name !== '';
return carriers.map(carrier => ({ const hasManufacturerCode =
carrierId: carrier.carrierId?.toString() || '', product.manufacturerCode !== undefined &&
carrierName: carrier.carrierName || '', product.manufacturerCode !== null &&
carrierDescription: carrier.carrierDescription || '', product.manufacturerCode !== '';
})); return hasId && hasName && hasManufacturerCode;
} catch (error) { })
this.logger.error('Erro ao buscar transportadoras do pedido', error); .map((product) => new ProductDto(product));
throw new HttpException('Erro ao buscar transportadoras do pedido', HttpStatus.INTERNAL_SERVER_ERROR); } catch (error) {
} this.logger.error('Erro ao buscar todos os produtos', error);
} throw error;
}
async getRegions(): Promise<RegionDto[]> { },
this.logger.log('Buscando todas as regiões'); );
try { } catch (error) {
return await getOrSetCache<RegionDto[]>( this.logger.error('Erro ao buscar todos os produtos', error);
this.redisClient, throw new HttpException(
this.REGIONS_CACHE_KEY, 'Erro ao buscar produtos',
this.REGIONS_TTL, HttpStatus.INTERNAL_SERVER_ERROR,
async () => { );
try { }
const regions = await this.repository.findRegions(); }
if (regions === null || regions === undefined) { async getAllCarriers(): Promise<CarrierDto[]> {
throw new HttpException('Resultado inválido do repositório', HttpStatus.INTERNAL_SERVER_ERROR); this.logger.log('Buscando todas as transportadoras');
} try {
return await getOrSetCache<CarrierDto[]>(
const regionsArray = Array.isArray(regions) ? regions : [regions]; this.redisClient,
this.CARRIERS_CACHE_KEY,
return regionsArray this.CARRIERS_TTL,
.filter(region => { async () => {
if (!region || typeof region !== 'object') { try {
return false; const carriers = await this.repository.findAllCarriers();
}
const hasNumregiao = region.numregiao !== undefined && region.numregiao !== null; if (carriers === null || carriers === undefined) {
const hasRegiao = region.regiao !== undefined && region.regiao !== null && region.regiao !== ''; throw new HttpException(
return hasNumregiao && hasRegiao; 'Resultado inválido do repositório',
}) HttpStatus.INTERNAL_SERVER_ERROR,
.map(region => new RegionDto(region)); );
} catch (error) { }
this.logger.error('Erro ao buscar regiões', error);
throw error; const carriersArray = Array.isArray(carriers)
} ? carriers
} : [carriers];
);
} catch (error) { return carriersArray
this.logger.error('Erro ao buscar regiões', error); .filter((carrier) => {
throw new HttpException('Erro ao buscar regiões', HttpStatus.INTERNAL_SERVER_ERROR); if (!carrier || typeof carrier !== 'object') {
} return false;
} }
} const hasCarrierId =
carrier.carrierId !== undefined &&
carrier.carrierId !== null &&
carrier.carrierId !== '';
const hasCarrierName =
carrier.carrierName !== undefined &&
carrier.carrierName !== null &&
carrier.carrierName !== '';
const hasCarrierDescription =
carrier.carrierDescription !== undefined &&
carrier.carrierDescription !== null &&
carrier.carrierDescription !== '';
return hasCarrierId && hasCarrierName && hasCarrierDescription;
})
.map((carrier) => ({
carrierId: carrier.carrierId?.toString() || '',
carrierName: carrier.carrierName || '',
carrierDescription: carrier.carrierDescription || '',
}));
} catch (error) {
this.logger.error('Erro ao buscar transportadoras', error);
throw error;
}
},
);
} catch (error) {
this.logger.error('Erro ao buscar transportadoras', error);
throw new HttpException(
'Erro ao buscar transportadoras',
HttpStatus.INTERNAL_SERVER_ERROR,
);
}
}
/**
 * Returns the carriers that had orders in the period described by `query`
 * (start/end date and optional branch), including the order count per carrier.
 * Fields missing on the repository row are normalized to '' / 0 so the DTO
 * is always fully populated.
 *
 * @param query date-range / branch filters (validated by FindCarriersDto).
 * @returns list of CarrierDto with `ordersCount` filled in.
 * @throws HttpException 500 when the repository lookup fails.
 */
async getCarriersByDate(query: FindCarriersDto): Promise<CarrierDto[]> {
  this.logger.log(
    `Buscando transportadoras por período: ${JSON.stringify(query)}`,
  );
  try {
    const carriers = await this.repository.findCarriersByDate(query);
    // Normalize raw repository rows into plain CarrierDto-shaped objects.
    return carriers.map((carrier) => ({
      carrierId: carrier.carrierId?.toString() || '',
      carrierName: carrier.carrierName || '',
      carrierDescription: carrier.carrierDescription || '',
      // ordersCount is coerced with Number(); presumably the driver may
      // return it as a string — TODO confirm against the repository query.
      ordersCount: carrier.ordersCount ? Number(carrier.ordersCount) : 0,
    }));
  } catch (error) {
    this.logger.error('Erro ao buscar transportadoras por período', error);
    throw new HttpException(
      'Erro ao buscar transportadoras',
      HttpStatus.INTERNAL_SERVER_ERROR,
    );
  }
}
/**
 * Returns the carriers associated with a single order.
 * Missing fields on the repository rows are normalized to '' so the DTO
 * is always fully populated.
 *
 * @param orderId identifier of the order whose carriers are requested.
 * @returns list of CarrierDto (without `ordersCount`).
 * @throws HttpException 500 when the repository lookup fails.
 */
async getOrderCarriers(orderId: number): Promise<CarrierDto[]> {
  this.logger.log(`Buscando transportadoras do pedido: ${orderId}`);
  try {
    const carriers = await this.repository.findOrderCarriers(orderId);
    // Map raw rows to the DTO shape, defaulting absent values to ''.
    return carriers.map((carrier) => ({
      carrierId: carrier.carrierId?.toString() || '',
      carrierName: carrier.carrierName || '',
      carrierDescription: carrier.carrierDescription || '',
    }));
  } catch (error) {
    this.logger.error('Erro ao buscar transportadoras do pedido', error);
    throw new HttpException(
      'Erro ao buscar transportadoras do pedido',
      HttpStatus.INTERNAL_SERVER_ERROR,
    );
  }
}
/**
 * Returns all regions, cached in Redis under REGIONS_CACHE_KEY for
 * REGIONS_TTL seconds. On a cache miss the inner factory queries the
 * repository, validates the result, filters out malformed rows and maps
 * the remainder to RegionDto.
 *
 * @returns list of RegionDto (only rows with numregiao and a non-empty regiao).
 * @throws HttpException 500 when the repository returns null/undefined or fails.
 */
async getRegions(): Promise<RegionDto[]> {
  this.logger.log('Buscando todas as regiões');
  try {
    return await getOrSetCache<RegionDto[]>(
      this.redisClient,
      this.REGIONS_CACHE_KEY,
      this.REGIONS_TTL,
      // Factory executed only on cache miss; its return value is cached.
      async () => {
        try {
          const regions = await this.repository.findRegions();
          // Defensive check: a null/undefined result is treated as a server error.
          if (regions === null || regions === undefined) {
            throw new HttpException(
              'Resultado inválido do repositório',
              HttpStatus.INTERNAL_SERVER_ERROR,
            );
          }
          // Tolerate a single object being returned instead of an array.
          const regionsArray = Array.isArray(regions) ? regions : [regions];
          return regionsArray
            .filter((region) => {
              // Drop non-object rows and rows missing required fields.
              if (!region || typeof region !== 'object') {
                return false;
              }
              const hasNumregiao =
                region.numregiao !== undefined && region.numregiao !== null;
              const hasRegiao =
                region.regiao !== undefined &&
                region.regiao !== null &&
                region.regiao !== '';
              return hasNumregiao && hasRegiao;
            })
            .map((region) => new RegionDto(region));
        } catch (error) {
          // Log and rethrow so getOrSetCache does not cache a failure.
          this.logger.error('Erro ao buscar regiões', error);
          throw error;
        }
      },
    );
  } catch (error) {
    this.logger.error('Erro ao buscar regiões', error);
    throw new HttpException(
      'Erro ao buscar regiões',
      HttpStatus.INTERNAL_SERVER_ERROR,
    );
  }
}
}

View File

@@ -4,26 +4,26 @@ import { IsOptional, IsString, IsDateString } from 'class-validator';
export class CarrierDto { export class CarrierDto {
@ApiProperty({ @ApiProperty({
description: 'ID da transportadora', description: 'ID da transportadora',
example: '123' example: '123',
}) })
carrierId: string; carrierId: string;
@ApiProperty({ @ApiProperty({
description: 'Nome da transportadora', description: 'Nome da transportadora',
example: 'TRANSPORTADORA ABC LTDA' example: 'TRANSPORTADORA ABC LTDA',
}) })
carrierName: string; carrierName: string;
@ApiProperty({ @ApiProperty({
description: 'Descrição completa da transportadora (ID - Nome)', description: 'Descrição completa da transportadora (ID - Nome)',
example: '123 - TRANSPORTADORA ABC LTDA' example: '123 - TRANSPORTADORA ABC LTDA',
}) })
carrierDescription: string; carrierDescription: string;
@ApiProperty({ @ApiProperty({
description: 'Quantidade de pedidos da transportadora no período', description: 'Quantidade de pedidos da transportadora no período',
example: 15, example: 15,
required: false required: false,
}) })
ordersCount?: number; ordersCount?: number;
} }
@@ -32,7 +32,7 @@ export class FindCarriersDto {
@ApiProperty({ @ApiProperty({
description: 'Data inicial para filtro (formato YYYY-MM-DD)', description: 'Data inicial para filtro (formato YYYY-MM-DD)',
example: '2024-01-01', example: '2024-01-01',
required: false required: false,
}) })
@IsOptional() @IsOptional()
@IsDateString() @IsDateString()
@@ -41,7 +41,7 @@ export class FindCarriersDto {
@ApiProperty({ @ApiProperty({
description: 'Data final para filtro (formato YYYY-MM-DD)', description: 'Data final para filtro (formato YYYY-MM-DD)',
example: '2024-12-31', example: '2024-12-31',
required: false required: false,
}) })
@IsOptional() @IsOptional()
@IsDateString() @IsDateString()
@@ -50,9 +50,9 @@ export class FindCarriersDto {
@ApiProperty({ @ApiProperty({
description: 'ID da filial', description: 'ID da filial',
example: '1', example: '1',
required: false required: false,
}) })
@IsOptional() @IsOptional()
@IsString() @IsString()
codfilial?: string; codfilial?: string;
} }

View File

@@ -20,4 +20,3 @@ export class RegionDto {
Object.assign(this, partial); Object.assign(this, partial);
} }
} }

View File

@@ -1,258 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { HttpService } from '@nestjs/axios';
import { ConfigService } from '@nestjs/config';
import { firstValueFrom } from 'rxjs';
import { HealthCheckResult } from '@nestjs/terminus';
@Injectable()
export class HealthAlertService {
  private readonly logger = new Logger(HealthAlertService.name);

  /** Webhook endpoint per alert channel; an entry is undefined when not configured. */
  private readonly webhookUrls: Record<string, string | undefined>;

  /** Thresholds per alert category (disk, memory, db cooldown). */
  private readonly alertThresholds: Record<string, any>;

  /** Timestamp (ms) of the last alert sent per database, used for cooldown. */
  private readonly alertCooldowns: Map<string, number> = new Map();

  constructor(
    private readonly httpService: HttpService,
    private readonly configService: ConfigService,
  ) {
    // Webhook configuration for the different alert channels.
    this.webhookUrls = {
      slack: this.configService.get<string>('ALERT_WEBHOOK_SLACK'),
      teams: this.configService.get<string>('ALERT_WEBHOOK_TEAMS'),
      email: this.configService.get<string>('ALERT_WEBHOOK_EMAIL'),
    };
    // Thresholds for the different alert types (percentages, minutes).
    this.alertThresholds = {
      disk: {
        criticalPercent: this.configService.get<number>('ALERT_DISK_CRITICAL_PERCENT', 90),
        warningPercent: this.configService.get<number>('ALERT_DISK_WARNING_PERCENT', 80),
      },
      memory: {
        criticalPercent: this.configService.get<number>('ALERT_MEMORY_CRITICAL_PERCENT', 90),
        warningPercent: this.configService.get<number>('ALERT_MEMORY_WARNING_PERCENT', 80),
      },
      db: {
        cooldownMinutes: this.configService.get<number>('ALERT_DB_COOLDOWN_MINUTES', 15),
      },
    };
  }

  /**
   * Inspects a terminus HealthCheckResult and dispatches alerts for any
   * failed components, plus disk/memory/database-specific threshold checks.
   * Never throws: all alerting errors are logged and swallowed so health
   * checks themselves cannot be broken by alert delivery failures.
   */
  async processHealthCheckResult(result: HealthCheckResult): Promise<void> {
    try {
      const { status, error, details } = result;
      // If the overall status is not 'ok', send a consolidated alert.
      if (status !== 'ok') {
        // Fix: `error` is optional on HealthCheckResult — guard against undefined
        // (the original called Object.entries(error) unconditionally).
        const failedComponents = Object.entries(error ?? {}).map(
          ([key, value]) => ({ key, value }),
        );
        if (failedComponents.length > 0) {
          await this.sendAlert(
            'critical',
            'Health Check Falhou',
            `Os seguintes componentes estão com problemas: ${failedComponents.map((c) => c.key).join(', ')}`,
            { result, failedComponents },
          );
        }
      }
      // Component-specific threshold checks.
      if (details.disk) {
        await this.checkDiskAlerts(details.disk);
      }
      if (details.memory_heap) {
        await this.checkMemoryAlerts(details.memory_heap);
      }
      // Database alerts. Fix: the original used Array.forEach with an async
      // call and never awaited it, so rejections escaped this try/catch;
      // for..of + await keeps them handled here.
      for (const db of ['oracle', 'postgres']) {
        if (details[db] && details[db].status !== 'up') {
          await this.checkDatabaseAlerts(db, details[db]);
        }
      }
    } catch (error) {
      this.logger.error(`Erro ao processar health check result: ${error.message}`, error.stack);
    }
  }

  /** Sends a warning/critical alert when disk usage crosses the configured thresholds. */
  private async checkDiskAlerts(diskDetails: any): Promise<void> {
    try {
      // Without both byte counts the percentage cannot be computed; skip silently.
      if (!diskDetails.freeBytes || !diskDetails.totalBytes) {
        return;
      }
      const usedPercent = ((diskDetails.totalBytes - diskDetails.freeBytes) / diskDetails.totalBytes) * 100;
      if (usedPercent >= this.alertThresholds.disk.criticalPercent) {
        await this.sendAlert('critical', 'Espaço em Disco Crítico',
          `O uso de disco está em ${usedPercent.toFixed(1)}%, acima do limite crítico de ${this.alertThresholds.disk.criticalPercent}%`,
          { diskDetails, usedPercent }
        );
      } else if (usedPercent >= this.alertThresholds.disk.warningPercent) {
        await this.sendAlert('warning', 'Alerta de Espaço em Disco',
          `O uso de disco está em ${usedPercent.toFixed(1)}%, acima do limite de alerta de ${this.alertThresholds.disk.warningPercent}%`,
          { diskDetails, usedPercent }
        );
      }
    } catch (error) {
      this.logger.error(`Erro ao verificar alertas de disco: ${error.message}`);
    }
  }

  /** Sends a warning/critical alert when heap usage crosses the configured thresholds. */
  private async checkMemoryAlerts(memoryDetails: any): Promise<void> {
    try {
      // Without both values the percentage cannot be computed; skip silently.
      if (!memoryDetails.usedBytes || !memoryDetails.thresholdBytes) {
        return;
      }
      const usedPercent = (memoryDetails.usedBytes / memoryDetails.thresholdBytes) * 100;
      if (usedPercent >= this.alertThresholds.memory.criticalPercent) {
        await this.sendAlert('critical', 'Uso de Memória Crítico',
          `O uso de memória heap está em ${usedPercent.toFixed(1)}%, acima do limite crítico de ${this.alertThresholds.memory.criticalPercent}%`,
          { memoryDetails, usedPercent }
        );
      } else if (usedPercent >= this.alertThresholds.memory.warningPercent) {
        await this.sendAlert('warning', 'Alerta de Uso de Memória',
          `O uso de memória heap está em ${usedPercent.toFixed(1)}%, acima do limite de alerta de ${this.alertThresholds.memory.warningPercent}%`,
          { memoryDetails, usedPercent }
        );
      }
    } catch (error) {
      this.logger.error(`Erro ao verificar alertas de memória: ${error.message}`);
    }
  }

  /**
   * Sends a critical alert for a database that is down, rate-limited by
   * ALERT_DB_COOLDOWN_MINUTES per database so repeated failures do not spam.
   */
  private async checkDatabaseAlerts(dbName: string, dbDetails: any): Promise<void> {
    try {
      const now = Date.now();
      const lastAlertTime = this.alertCooldowns.get(dbName) || 0;
      const cooldownMs = this.alertThresholds.db.cooldownMinutes * 60 * 1000;
      // Only alert when the cooldown window for this database has elapsed.
      if (now - lastAlertTime >= cooldownMs) {
        await this.sendAlert('critical', `Problema de Conexão com Banco de Dados ${dbName}`,
          `A conexão com o banco de dados ${dbName} está com problemas: ${dbDetails.message || 'Erro não especificado'}`,
          { dbName, dbDetails }
        );
        // Record when this alert fired to start the next cooldown window.
        this.alertCooldowns.set(dbName, now);
      }
    } catch (error) {
      this.logger.error(`Erro ao verificar alertas de banco de dados: ${error.message}`);
    }
  }

  /**
   * Builds the alert payload and fans it out to every configured channel
   * (Slack, Teams, email). Errors are logged, never propagated.
   */
  private async sendAlert(
    severity: 'critical' | 'warning' | 'info',
    title: string,
    message: string,
    details?: any,
  ): Promise<void> {
    try {
      const environment = this.configService.get<string>('NODE_ENV', 'development');
      const appName = this.configService.get<string>('APP_NAME', 'Portal Jurunense API');
      this.logger.warn(`[${severity.toUpperCase()}] ${title}: ${message}`);
      const payload = {
        severity,
        title: `[${environment.toUpperCase()}] [${appName}] ${title}`,
        message,
        timestamp: new Date().toISOString(),
        details: details || {},
        environment,
      };
      // Deliver to each channel that has a webhook configured.
      if (this.webhookUrls.slack) {
        await this.sendSlackAlert(payload);
      }
      if (this.webhookUrls.teams) {
        await this.sendTeamsAlert(payload);
      }
      if (this.webhookUrls.email) {
        await this.sendEmailAlert(payload);
      }
    } catch (error) {
      this.logger.error(`Erro ao enviar alerta: ${error.message}`, error.stack);
    }
  }

  /** Posts the alert to Slack using the Block Kit layout. */
  private async sendSlackAlert(payload: any): Promise<void> {
    try {
      const slackPayload = {
        text: `${payload.title}`,
        blocks: [
          {
            type: 'header',
            text: {
              type: 'plain_text',
              text: payload.title,
            },
          },
          {
            type: 'section',
            text: {
              type: 'mrkdwn',
              text: `*Mensagem:* ${payload.message}\n*Severidade:* ${payload.severity}\n*Ambiente:* ${payload.environment}\n*Timestamp:* ${payload.timestamp}`,
            },
          },
        ],
      };
      await firstValueFrom(this.httpService.post(this.webhookUrls.slack, slackPayload));
    } catch (error) {
      this.logger.error(`Erro ao enviar alerta para Slack: ${error.message}`);
    }
  }

  /** Posts the alert to Microsoft Teams as a MessageCard. */
  private async sendTeamsAlert(payload: any): Promise<void> {
    try {
      const teamsPayload = {
        "@type": "MessageCard",
        "@context": "http://schema.org/extensions",
        // Red for critical, orange for warning, blue otherwise.
        "themeColor": payload.severity === 'critical' ? "FF0000" : (payload.severity === 'warning' ? "FFA500" : "0078D7"),
        "summary": payload.title,
        "sections": [
          {
            "activityTitle": payload.title,
            "activitySubtitle": `Severidade: ${payload.severity} | Ambiente: ${payload.environment}`,
            "text": payload.message,
            "facts": [
              {
                "name": "Timestamp",
                "value": payload.timestamp
              }
            ]
          }
        ]
      };
      await firstValueFrom(this.httpService.post(this.webhookUrls.teams, teamsPayload));
    } catch (error) {
      this.logger.error(`Erro ao enviar alerta para Microsoft Teams: ${error.message}`);
    }
  }

  /** Posts the alert to the configured email webhook (subject + text + html). */
  private async sendEmailAlert(payload: any): Promise<void> {
    try {
      const emailPayload = {
        subject: payload.title,
        text: `${payload.message}\n\nSeveridade: ${payload.severity}\nAmbiente: ${payload.environment}\nTimestamp: ${payload.timestamp}`,
        html: `<h2>${payload.title}</h2><p>${payload.message}</p><p><strong>Severidade:</strong> ${payload.severity}<br><strong>Ambiente:</strong> ${payload.environment}<br><strong>Timestamp:</strong> ${payload.timestamp}</p>`,
      };
      await firstValueFrom(this.httpService.post(this.webhookUrls.email, emailPayload));
    } catch (error) {
      this.logger.error(`Erro ao enviar alerta por email: ${error.message}`);
    }
  }
}

View File

@@ -1,119 +0,0 @@
import { Controller, Get, UseGuards } from '@nestjs/common';
import {
HealthCheck,
HealthCheckService,
HttpHealthIndicator,
DiskHealthIndicator,
MemoryHealthIndicator,
} from '@nestjs/terminus';
import { TypeOrmHealthIndicator } from './indicators/typeorm.health';
import { DbPoolStatsIndicator } from './indicators/db-pool-stats.health';
import { ConfigService } from '@nestjs/config';
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
import * as os from 'os';
/**
 * JWT-protected health-check endpoints built on @nestjs/terminus.
 * Exposes aggregate (/health) plus focused sub-checks for databases,
 * memory, disk and connection pools.
 */
@ApiTags('Health Check')
@Controller('health')
export class HealthController {
  // Filesystem root used for disk checks; drive C:\ on Windows, / elsewhere.
  private readonly diskPath: string;
  constructor(
    private health: HealthCheckService,
    private http: HttpHealthIndicator,
    private disk: DiskHealthIndicator,
    private memory: MemoryHealthIndicator,
    private typeOrmHealth: TypeOrmHealthIndicator,
    private dbPoolStats: DbPoolStatsIndicator,
    private configService: ConfigService,
  ) {
    this.diskPath = os.platform() === 'win32' ? 'C:\\' : '/';
  }
  @UseGuards(JwtAuthGuard)
  @ApiBearerAuth()
  @Get()
  @HealthCheck()
  @ApiOperation({ summary: 'Verificar saúde geral da aplicação' })
  check() {
    return this.health.check([
      // Self-ping of the application.
      // NOTE(review): host/port are hardcoded — presumably should come from
      // configService; confirm before deploying behind a different port.
      () => this.http.pingCheck('api', 'http://localhost:8066/docs'),
      // Disk usage must stay below 80%.
      () => this.disk.checkStorage('disk_percent', {
        path: this.diskPath,
        thresholdPercent: 0.8, // 80%
      }),
      // At least 500MB of free disk space.
      () => this.disk.checkStorage('disk_space', {
        path: this.diskPath,
        threshold: 500 * 1024 * 1024, // 500MB in bytes
      }),
      // Heap usage must stay below 150MB.
      () => this.memory.checkHeap('memory_heap', 150 * 1024 * 1024), // 150MB
      // Database connection checks.
      () => this.typeOrmHealth.checkOracle(),
      () => this.typeOrmHealth.checkPostgres(),
    ]);
  }
  @UseGuards(JwtAuthGuard)
  @ApiBearerAuth()
  @Get('db')
  @HealthCheck()
  @ApiOperation({ summary: 'Verificar saúde das conexões de banco de dados' })
  // Database-only health check (Oracle + Postgres).
  checkDatabase() {
    return this.health.check([
      () => this.typeOrmHealth.checkOracle(),
      () => this.typeOrmHealth.checkPostgres(),
    ]);
  }
  @UseGuards(JwtAuthGuard)
  @ApiBearerAuth()
  @Get('memory')
  @HealthCheck()
  @ApiOperation({ summary: 'Verificar uso de memória' })
  // Memory-only health check: heap < 150MB, RSS < 300MB.
  checkMemory() {
    return this.health.check([
      () => this.memory.checkHeap('memory_heap', 150 * 1024 * 1024),
      () => this.memory.checkRSS('memory_rss', 300 * 1024 * 1024),
    ]);
  }
  @UseGuards(JwtAuthGuard)
  @ApiBearerAuth()
  @Get('disk')
  @HealthCheck()
  @ApiOperation({ summary: 'Verificar espaço em disco' })
  // Disk-only health check.
  checkDisk() {
    return this.health.check([
      // Percentage-based disk check.
      () => this.disk.checkStorage('disk_percent', {
        path: this.diskPath,
        thresholdPercent: 0.8,
      }),
      // Absolute free-space disk check.
      () => this.disk.checkStorage('disk_space', {
        path: this.diskPath,
        threshold: 500 * 1024 * 1024,
      }),
    ]);
  }
  @UseGuards(JwtAuthGuard)
  @ApiBearerAuth()
  @Get('pool')
  @HealthCheck()
  @ApiOperation({ summary: 'Verificar estatísticas do pool de conexões' })
  // Connection-pool statistics for both databases.
  checkPoolStats() {
    return this.health.check([
      () => this.dbPoolStats.checkOraclePoolStats(),
      () => this.dbPoolStats.checkPostgresPoolStats(),
    ]);
  }
}

View File

@@ -1,44 +0,0 @@
import { Module } from '@nestjs/common';
import { TerminusModule } from '@nestjs/terminus';
import { HttpModule } from '@nestjs/axios';
import { HealthController } from './health.controller';
import { TypeOrmHealthIndicator } from './indicators/typeorm.health';
import { DbPoolStatsIndicator } from './indicators/db-pool-stats.health';
import { ConfigModule } from '@nestjs/config';
import { PrometheusModule } from '@willsoto/nestjs-prometheus';
import { metricProviders } from './metrics/metrics.config';
import { CustomMetricsService } from './metrics/custom.metrics';
import { MetricsInterceptor } from './metrics/metrics.interceptor';
import { HealthAlertService } from './alert/health-alert.service';
import { APP_INTERCEPTOR } from '@nestjs/core';
/**
 * Wires together health checks (terminus), Prometheus metrics (exposed at
 * /metrics) and the alerting service. The MetricsInterceptor is registered
 * globally via APP_INTERCEPTOR so every HTTP request is measured.
 */
@Module({
  imports: [
    TerminusModule,
    HttpModule,
    ConfigModule,
    PrometheusModule.register({
      path: '/metrics',
      defaultMetrics: {
        enabled: true,
      },
    }),
  ],
  controllers: [HealthController],
  providers: [
    TypeOrmHealthIndicator,
    DbPoolStatsIndicator,
    CustomMetricsService,
    HealthAlertService,
    {
      provide: APP_INTERCEPTOR,
      useClass: MetricsInterceptor,
    },
    // Counter/gauge/histogram providers declared in metrics.config.
    ...metricProviders,
  ],
  exports: [
    CustomMetricsService,
    HealthAlertService,
  ],
})
export class HealthModule {}

View File

@@ -1,193 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import {
HealthIndicator,
HealthIndicatorResult,
HealthCheckError, // Import HealthCheckError for better terminus integration
} from '@nestjs/terminus';
import { InjectConnection } from '@nestjs/typeorm';
import { DataSource } from 'typeorm';
const ORACLE_HEALTH_KEY = 'oracle_pool_stats';
const POSTGRES_HEALTH_KEY = 'postgres_pool_stats';
const ORACLE_PROGRAM_PATTERN = 'node%'; // Default pattern for Oracle
const POSTGRES_APP_NAME_PATTERN = 'nodejs%'; // Default pattern for PostgreSQL
/**
 * Health indicator that reports connection-pool statistics by querying the
 * databases' own session views (Oracle V$SESSION, Postgres pg_stat_activity).
 */
@Injectable()
export class DbPoolStatsIndicator extends HealthIndicator {
  private readonly logger = new Logger(DbPoolStatsIndicator.name);
  constructor(
    @InjectConnection('oracle') private readonly oracleDataSource: DataSource,
    @InjectConnection('postgres') private readonly postgresDataSource: DataSource,
  ) {
    super();
  }
  /**
   * Checks the Oracle connection pool by querying V$SESSION.
   * Notes: requires SELECT privilege on V$SESSION and relies on the PROGRAM
   * column. This primarily verifies database reachability and query success.
   * Consider driver-level pool statistics for more accurate pool health,
   * if available.
   *
   * @param key Custom key for the health indicator component.
   * @param programLike Optional pattern to match the PROGRAM column in V$SESSION.
   */
  async checkOraclePoolStats(
    key: string = ORACLE_HEALTH_KEY,
    programLike: string = ORACLE_PROGRAM_PATTERN,
  ): Promise<HealthIndicatorResult> {
    try {
      // Bind parameters are good practice, though less critical for LIKE
      // with a fixed pattern. Oracle uses the :paramName syntax.
      const query = `
        SELECT
          COUNT(*) AS "totalConnections" -- Use quoted identifiers if needed, or match case below
        FROM
          V$SESSION
        WHERE
          TYPE = 'USER'
          AND PROGRAM LIKE :pattern
      `;
      const params = { pattern: programLike };
      const results: { totalConnections: number | string }[] =
        await this.oracleDataSource.query(query, [params.pattern]); // Pass parameters as an array for Oracle usually
      if (!results || results.length === 0) {
        this.logger.warn(`Oracle V$SESSION query returned no results for pattern '${programLike}'`);
      }
      const totalConnections = parseInt(String(results?.[0]?.totalConnections ?? 0), 10);
      if (isNaN(totalConnections)) {
        throw new Error('Failed to parse totalConnections from Oracle V$SESSION query result.');
      }
      // isHealthy is true whenever the query executed without throwing.
      // Add logic here if specific connection counts should be considered
      // unhealthy (e.g. totalConnections > poolMax).
      const isHealthy = true;
      const details = {
        totalConnections: totalConnections,
        programPattern: programLike,
      };
      return this.getStatus(key, isHealthy, details);
    } catch (error) {
      this.logger.error(`Oracle pool stats check failed for key "${key}": ${error.message}`, error.stack);
      throw new HealthCheckError(
        `${key} check failed`,
        this.getStatus(key, false, { message: error.message }),
      );
    }
  }
  /**
   * Checks the PostgreSQL connection pool by querying pg_stat_activity.
   * Notes: relies on application_name being set correctly in the connection
   * string/options. This primarily verifies database reachability and query
   * success. Consider driver-level pool statistics for more accurate pool
   * health, if available.
   *
   * @param key Custom key for the health indicator component.
   * @param appNameLike Optional pattern to match the application_name column.
   */
  async checkPostgresPoolStats(
    key: string = POSTGRES_HEALTH_KEY,
    appNameLike: string = POSTGRES_APP_NAME_PATTERN,
  ): Promise<HealthIndicatorResult> {
    try {
      const query = `
        SELECT
          count(*) AS "totalConnections",
          sum(CASE WHEN state = 'active' THEN 1 ELSE 0 END) AS "activeConnections",
          sum(CASE WHEN state = 'idle' THEN 1 ELSE 0 END) AS "idleConnections",
          sum(CASE WHEN state = 'idle in transaction' THEN 1 ELSE 0 END) AS "idleInTransactionConnections"
        FROM
          pg_stat_activity
        WHERE
          datname = current_database()
          AND application_name LIKE $1
      `;
      const params = [appNameLike];
      const results: {
        totalConnections: string | number;
        activeConnections: string | number;
        idleConnections: string | number;
        idleInTransactionConnections: string | number;
      }[] = await this.postgresDataSource.query(query, params);
      if (!results || results.length === 0) {
        throw new Error('PostgreSQL pg_stat_activity query returned no results unexpectedly.');
      }
      const result = results[0];
      // pg drivers commonly return counts as strings; parse defensively.
      const totalConnections = parseInt(String(result.totalConnections ?? 0), 10);
      const activeConnections = parseInt(String(result.activeConnections ?? 0), 10);
      const idleConnections = parseInt(String(result.idleConnections ?? 0), 10);
      const idleInTransactionConnections = parseInt(String(result.idleInTransactionConnections ?? 0), 10);
      // Validate parsing
      if (isNaN(totalConnections) || isNaN(activeConnections) || isNaN(idleConnections) || isNaN(idleInTransactionConnections)) {
        throw new Error('Failed to parse connection counts from PostgreSQL pg_stat_activity query result.');
      }
      const isHealthy = true;
      const details = {
        totalConnections,
        activeConnections,
        idleConnections,
        idleInTransactionConnections,
        applicationNamePattern: appNameLike,
      };
      return this.getStatus(key, isHealthy, details);
    } catch (error) {
      this.logger.error(`PostgreSQL pool stats check failed for key "${key}": ${error.message}`, error.stack);
      throw new HealthCheckError(
        `${key} check failed`,
        this.getStatus(key, false, { message: error.message }),
      );
    }
  }
  /**
   * Convenience method to run all pool checks defined in this indicator.
   * You would typically call this from your main HealthController.
   */
  async checkAllPools() : Promise<HealthIndicatorResult[]> {
    const results = await Promise.allSettled([
      this.checkOraclePoolStats(),
      this.checkPostgresPoolStats()
    ]);
    // Individual methods already return HealthIndicatorResult or throw
    // HealthCheckError, so this method only aggregates fulfilled results.
    // It may be unnecessary if the controller invokes the checks directly.
    results.forEach(result => {
      if (result.status === 'rejected') {
        // Already logged and thrown as HealthCheckError inside the check methods
      } else {
        // Optionally log success details
        this.logger.log(`Pool check successful: ${JSON.stringify(result.value)}`);
      }
    });
    return results
      .filter((r): r is PromiseFulfilledResult<HealthIndicatorResult> => r.status === 'fulfilled')
      .map(r => r.value);
  }
}

View File

@@ -1,52 +0,0 @@
import { Injectable } from '@nestjs/common';
import { HealthIndicator, HealthIndicatorResult, HealthCheckError } from '@nestjs/terminus';
import { InjectConnection } from '@nestjs/typeorm';
import { Connection, DataSource } from 'typeorm';
/**
 * Reports whether the named TypeORM DataSources ('oracle', 'postgres')
 * are initialized, in the HealthIndicatorResult format terminus expects.
 */
@Injectable()
export class TypeOrmHealthIndicator extends HealthIndicator {
  constructor(
    @InjectConnection('oracle') private oracleConnection: DataSource,
    @InjectConnection('postgres') private postgresConnection: DataSource,
  ) {
    super();
  }

  /** Health of the Oracle DataSource. */
  async checkOracle(): Promise<HealthIndicatorResult> {
    return this.verify('oracle', this.oracleConnection, 'Oracle healthcheck failed');
  }

  /** Health of the Postgres DataSource. */
  async checkPostgres(): Promise<HealthIndicatorResult> {
    return this.verify('postgres', this.postgresConnection, 'Postgres healthcheck failed');
  }

  /**
   * Shared check: returns an 'up' status when the DataSource is initialized,
   * otherwise throws HealthCheckError. The inner throw is deliberately caught
   * by the surrounding catch so the failure message is attached to the status,
   * exactly mirroring the per-database methods this helper replaces.
   */
  private verify(
    key: string,
    dataSource: DataSource,
    failureMessage: string,
  ): HealthIndicatorResult {
    try {
      const up = dataSource.isInitialized;
      const status = this.getStatus(key, up);
      if (!up) {
        throw new HealthCheckError(failureMessage, status);
      }
      return status;
    } catch (error) {
      const downStatus = this.getStatus(key, false, { message: error.message });
      throw new HealthCheckError(failureMessage, downStatus);
    }
  }
}

View File

@@ -1,93 +0,0 @@
import { Injectable } from '@nestjs/common';
import { InjectMetric } from '@willsoto/nestjs-prometheus';
import { Counter, Gauge, Histogram } from 'prom-client';
import { InjectConnection } from '@nestjs/typeorm';
import { DataSource } from 'typeorm';
/**
 * Collects custom Prometheus metrics: HTTP request counts/durations,
 * process memory usage and database connection-pool gauges.
 * Background collection starts in the constructor via setInterval.
 */
@Injectable()
export class CustomMetricsService {
  constructor(
    @InjectMetric('http_request_total')
    private readonly requestCounter: Counter<string>,
    @InjectMetric('http_request_duration_seconds')
    private readonly requestDuration: Histogram<string>,
    @InjectMetric('api_memory_usage_bytes')
    private readonly memoryGauge: Gauge<string>,
    @InjectMetric('api_db_connection_pool_used')
    private readonly dbPoolUsedGauge: Gauge<string>,
    @InjectMetric('api_db_connection_pool_total')
    private readonly dbPoolTotalGauge: Gauge<string>,
    @InjectMetric('api_db_query_duration_seconds')
    private readonly dbQueryDuration: Histogram<string>,
    @InjectConnection('oracle')
    private oracleConnection: DataSource,
    @InjectConnection('postgres')
    private postgresConnection: DataSource,
  ) {
    // Start memory metrics collection.
    this.startMemoryMetrics();
    // Start connection-pool metrics collection.
    // NOTE(review): these intervals are never cleared or unref'd, which keeps
    // the event loop alive on shutdown — consider OnModuleDestroy cleanup.
    this.startDbPoolMetrics();
  }
  // Increments the HTTP request counter for one completed request.
  recordHttpRequest(method: string, route: string, statusCode: number): void {
    this.requestCounter.inc({ method, route, statusCode: statusCode.toString() });
  }
  // Starts a duration timer; the returned callback stops it and records
  // the elapsed time with the given labels.
  startTimingRequest(): (labels?: Record<string, string>) => void {
    const end = this.requestDuration.startTimer();
    return (labels?: Record<string, string>) => end(labels);
  }
  // Records one DB query duration, converting milliseconds to seconds.
  recordDbQueryDuration(db: 'oracle' | 'postgres', operation: string, durationMs: number): void {
    this.dbQueryDuration.observe({ db, operation }, durationMs / 1000);
  }
  private startMemoryMetrics(): void {
    // Collect memory metrics every 15 seconds.
    setInterval(() => {
      const memoryUsage = process.memoryUsage();
      this.memoryGauge.set({ type: 'rss' }, memoryUsage.rss);
      this.memoryGauge.set({ type: 'heapTotal' }, memoryUsage.heapTotal);
      this.memoryGauge.set({ type: 'heapUsed' }, memoryUsage.heapUsed);
      this.memoryGauge.set({ type: 'external' }, memoryUsage.external);
    }, 15000);
  }
  private startDbPoolMetrics(): void {
    // Collect connection-pool metrics every 15 seconds.
    setInterval(async () => {
      try {
        // Try to read Oracle pool statistics.
        // Note: depends on the specific OracleDB driver implementation.
        if (this.oracleConnection && this.oracleConnection.driver) {
          const oraclePoolStats = (this.oracleConnection.driver as any).pool?.getStatistics?.();
          if (oraclePoolStats) {
            this.dbPoolUsedGauge.set({ db: 'oracle' }, oraclePoolStats.busy || 0);
            this.dbPoolTotalGauge.set({ db: 'oracle' }, oraclePoolStats.poolMax || 0);
          }
        }
        // Try to read Postgres pool statistics.
        // Note: depends on the specific TypeORM driver implementation.
        if (this.postgresConnection && this.postgresConnection.driver) {
          const pgPoolStats = (this.postgresConnection.driver as any).pool;
          if (pgPoolStats) {
            this.dbPoolUsedGauge.set({ db: 'postgres' }, pgPoolStats.totalCount - pgPoolStats.idleCount || 0);
            this.dbPoolTotalGauge.set({ db: 'postgres' }, pgPoolStats.totalCount || 0);
          }
        }
      } catch (error) {
        console.error('Erro ao coletar métricas do pool de conexões:', error);
      }
    }, 15000);
  }
}

View File

@@ -1,51 +0,0 @@
import {
makeCounterProvider,
makeGaugeProvider,
makeHistogramProvider
} from '@willsoto/nestjs-prometheus';
/**
 * Prometheus metric provider definitions consumed by HealthModule.
 * Label sets here must match the labels used in CustomMetricsService.
 */
export const metricProviders = [
  // HTTP request counter.
  makeCounterProvider({
    name: 'http_request_total',
    help: 'Total de requisições HTTP',
    labelNames: ['method', 'route', 'statusCode'],
  }),
  // HTTP request duration histogram.
  makeHistogramProvider({
    name: 'http_request_duration_seconds',
    help: 'Duração das requisições HTTP em segundos',
    labelNames: ['method', 'route', 'error'], // 'error' label added for failed requests
    buckets: [0.01, 0.05, 0.1, 0.5, 1, 2, 5, 10],
  }),
  // Memory usage gauge.
  makeGaugeProvider({
    name: 'api_memory_usage_bytes',
    help: 'Uso de memória da aplicação em bytes',
    labelNames: ['type'],
  }),
  // Gauge for database connections in use.
  makeGaugeProvider({
    name: 'api_db_connection_pool_used',
    help: 'Número de conexões de banco de dados em uso',
    labelNames: ['db'],
  }),
  // Gauge for total connections in the database pool.
  makeGaugeProvider({
    name: 'api_db_connection_pool_total',
    help: 'Número total de conexões no pool de banco de dados',
    labelNames: ['db'],
  }),
  // Database query duration histogram.
  makeHistogramProvider({
    name: 'api_db_query_duration_seconds',
    help: 'Duração das consultas de banco de dados em segundos',
    labelNames: ['db', 'operation'],
    buckets: [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 2],
  }),
];

View File

@@ -1,64 +0,0 @@
import {
Injectable,
NestInterceptor,
ExecutionContext,
CallHandler,
} from '@nestjs/common';
import { Observable } from 'rxjs';
import { tap } from 'rxjs/operators';
import { CustomMetricsService } from './custom.metrics';
/**
 * Global interceptor that records Prometheus metrics (count + duration)
 * for every HTTP request, on both success and error paths.
 */
@Injectable()
export class MetricsInterceptor implements NestInterceptor {
  constructor(private metricsService: CustomMetricsService) {}
  intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
    // Only HTTP requests are measured; other transports pass through.
    if (context.getType() !== 'http') {
      return next.handle();
    }
    const request = context.switchToHttp().getRequest();
    const { method, url } = request;
    // Simplify the route to avoid high label cardinality in Prometheus.
    // E.g. /users/123 -> /users/:id
    const route = this.normalizeRoute(url);
    // Start the timer measuring request duration.
    const endTimer = this.metricsService.startTimingRequest();
    return next.handle().pipe(
      tap({
        next: (data) => {
          const response = context.switchToHttp().getResponse();
          const statusCode = response.statusCode;
          // Record the completed request.
          this.metricsService.recordHttpRequest(method, route, statusCode);
          // Stop the timer with the route labels.
          endTimer({ method, route });
        },
        error: (error) => {
          // Determine the error's status code (defaults to 500).
          this.metricsService.recordHttpRequest(method, route, error.status || 500);
          // Stop the timer, marking the request as failed.
          endTimer({ method, route, error: 'true' });
        }
      })
    );
  }
  private normalizeRoute(url: string): string {
    // Remove query parameters
    const path = url.split('?')[0];
    // Normalize routes containing IDs and other dynamic segments,
    // e.g. /users/123 -> /users/:id
    return path.replace(/\/[0-9a-f]{8,}|\/[0-9]+/g, '/:id');
  }
}

View File

@@ -1,5 +1,5 @@
/* eslint-disable prettier/prettier */ /* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/* /*
https://docs.nestjs.com/controllers#controllers https://docs.nestjs.com/controllers#controllers
*/ */

View File

@@ -1,5 +1,5 @@
/* eslint-disable prettier/prettier */ /* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
import { LogisticController } from './logistic.controller'; import { LogisticController } from './logistic.controller';
import { LogisticService } from './logistic.service'; import { LogisticService } from './logistic.service';

View File

@@ -1,347 +1,344 @@
import { Get, HttpException, HttpStatus, Injectable, Query, UseGuards } from '@nestjs/common'; import {
import { createOracleConfig } from '../core/configs/typeorm.oracle.config'; HttpException,
import { createPostgresConfig } from '../core/configs/typeorm.postgres.config'; HttpStatus,
import { CarOutDelivery } from '../core/models/car-out-delivery.model'; Injectable,
import { DataSource } from 'typeorm'; } from '@nestjs/common';
import { CarInDelivery } from '../core/models/car-in-delivery.model'; import { createOracleConfig } from '../core/configs/typeorm.oracle.config';
import { ConfigService } from '@nestjs/config'; import { createPostgresConfig } from '../core/configs/typeorm.postgres.config';
import { CarOutDelivery } from '../core/models/car-out-delivery.model';
@Injectable() import { DataSource } from 'typeorm';
export class LogisticService { import { CarInDelivery } from '../core/models/car-in-delivery.model';
constructor(private readonly configService: ConfigService) {} import { ConfigService } from '@nestjs/config';
async getExpedicao() { @Injectable()
const dataSource = new DataSource(createPostgresConfig(this.configService)); export class LogisticService {
await dataSource.initialize(); constructor(private readonly configService: ConfigService) {}
const queryRunner = dataSource.createQueryRunner();
await queryRunner.connect(); async getExpedicao() {
try { const dataSource = new DataSource(createPostgresConfig(this.configService));
await dataSource.initialize();
const sqlWMS = `select dados.*, const queryRunner = dataSource.createQueryRunner();
( select count(distinct v.numero_carga) quantidade_cargas_embarcadas await queryRunner.connect();
from volume v, carga c2 try {
where v.numero_carga = c2.numero const sqlWMS = `select dados.*,
and c2.data_integracao >= TO_DATE('01/02/2025', 'DD/MM/YYYY') ( select count(distinct v.numero_carga) quantidade_cargas_embarcadas
and TO_DATE(RIGHT(c2.observacao, 10), 'DD/MM/YYYY') = dados.dataHoje from volume v, carga c2
and v.embarcado = 'S' ) quantidade_cargas_embarcadas where v.numero_carga = c2.numero
FROM ( select date_trunc('day', (CURRENT_DATE + INTERVAL '1 day'))::date data_saida, --TO_DATE(RIGHT(c.observacao, 10), 'DD/MM/YYYY') data_saida, and c2.data_integracao >= TO_DATE('01/02/2025', 'DD/MM/YYYY')
date_trunc('day', (CURRENT_DATE + INTERVAL '1 day'))::date dataHoje, and TO_DATE(RIGHT(c2.observacao, 10), 'DD/MM/YYYY') = dados.dataHoje
SUM(c.qt_itens_conferidos) total_itens_conferidos, and v.embarcado = 'S' ) quantidade_cargas_embarcadas
SUM(c.qt_itens_separados) total_itens_separados, FROM ( select date_trunc('day', (CURRENT_DATE + INTERVAL '1 day'))::date data_saida, --TO_DATE(RIGHT(c.observacao, 10), 'DD/MM/YYYY') data_saida,
SUM(c.qt_total_itens) quantidade_total_itens, date_trunc('day', (CURRENT_DATE + INTERVAL '1 day'))::date dataHoje,
SUM(c.qt_total_pedidos) quantidade_total, SUM(c.qt_itens_conferidos) total_itens_conferidos,
SUM(m.qt * p.peso_unidade) total_kg, SUM(c.qt_itens_separados) total_itens_separados,
COUNT(DISTINCT c.numero) quantidade_cargas, SUM(c.qt_total_itens) quantidade_total_itens,
COUNT(DISTINCT (CASE WHEN m.data_fim_separacao is not null then c.numero else null end)) quantidade_cargas_separacao_finalizadas, SUM(c.qt_total_pedidos) quantidade_total,
COUNT(DISTINCT (CASE WHEN m.data_fim_conferencia is not null then c.numero else null end)) quantidade_cargas_conferencia_finalizadas, SUM(m.qt * p.peso_unidade) total_kg,
SUM(case when m.data_inicio_separacao is null then m.qt * p.peso_unidade else 0 end) total_peso_separacao_nao_iniciada, COUNT(DISTINCT c.numero) quantidade_cargas,
SUM(case when m.data_inicio_separacao is not null and m.data_fim_separacao is null then m.qt * p.peso_unidade else 0 end) total_peso_em_separacao, COUNT(DISTINCT (CASE WHEN m.data_fim_separacao is not null then c.numero else null end)) quantidade_cargas_separacao_finalizadas,
SUM(case when m.data_fim_separacao is not null then m.qt * p.peso_unidade else 0 end) total_peso_separado, COUNT(DISTINCT (CASE WHEN m.data_fim_conferencia is not null then c.numero else null end)) quantidade_cargas_conferencia_finalizadas,
SUM(case when m.data_fim_separacao is not null and m.data_inicio_conferencia is null then m.qt * p.peso_unidade else 0 end) total_conferencia_nao_iniciada, SUM(case when m.data_inicio_separacao is null then m.qt * p.peso_unidade else 0 end) total_peso_separacao_nao_iniciada,
SUM(case when m.data_fim_separacao is not null and m.data_inicio_conferencia is not null and m.data_fim_conferencia is null then m.qt * p.peso_unidade else 0 end) total_peso_em_conferencia, SUM(case when m.data_inicio_separacao is not null and m.data_fim_separacao is null then m.qt * p.peso_unidade else 0 end) total_peso_em_separacao,
SUM(case when m.data_fim_conferencia is not null then m.qt * p.peso_unidade else 0 end) total_peso_conferido SUM(case when m.data_fim_separacao is not null then m.qt * p.peso_unidade else 0 end) total_peso_separado,
from movimentacao m , carga c , produto p SUM(case when m.data_fim_separacao is not null and m.data_inicio_conferencia is null then m.qt * p.peso_unidade else 0 end) total_conferencia_nao_iniciada,
where m.numero_carga = c.numero SUM(case when m.data_fim_separacao is not null and m.data_inicio_conferencia is not null and m.data_fim_conferencia is null then m.qt * p.peso_unidade else 0 end) total_peso_em_conferencia,
and m.produto_id = p.id SUM(case when m.data_fim_conferencia is not null then m.qt * p.peso_unidade else 0 end) total_peso_conferido
and m.data_integracao >= TO_DATE('01/01/2025', 'DD/MM/YYYY') from movimentacao m , carga c , produto p
and c.data_faturamento IS NULL where m.numero_carga = c.numero
and c.destino not like '%TRANSF%' and m.produto_id = p.id
and m.empresa_id in ( 3, 4 ) and m.data_integracao >= TO_DATE('01/01/2025', 'DD/MM/YYYY')
--group by TO_DATE(RIGHT(c.observacao, 10), 'DD/MM/YYYY') and c.data_faturamento IS NULL
) dados and c.destino not like '%TRANSF%'
where dados.data_saida >= current_date and m.empresa_id in ( 3, 4 )
ORDER BY dados.data_saida desc `; --group by TO_DATE(RIGHT(c.observacao, 10), 'DD/MM/YYYY')
) dados
where dados.data_saida >= current_date
const sql = `SELECT COUNT(DISTINCT PCCARREG.NUMCAR) as "qtde" ORDER BY dados.data_saida desc `;
,SUM(PCPEDI.QT * PCPRODUT.PESOBRUTO) as "totalKG"
,SUM(CASE WHEN PCPEDC.DTINICIALSEP IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_nao_iniciado" const sql = `SELECT COUNT(DISTINCT PCCARREG.NUMCAR) as "qtde"
,SUM(CASE WHEN PCPEDC.DTINICIALSEP IS NOT NULL ,SUM(PCPEDI.QT * PCPRODUT.PESOBRUTO) as "totalKG"
AND PCPEDC.DTFINALSEP IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_em_separacao" ,SUM(CASE WHEN PCPEDC.DTINICIALSEP IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_nao_iniciado"
,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_separado" ,SUM(CASE WHEN PCPEDC.DTINICIALSEP IS NOT NULL
,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL AND PCPEDC.DTFINALSEP IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_em_separacao"
AND PCPEDC.DTINICIALCHECKOUT IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_conferencia_nao_iniciada" ,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_separado"
,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL ,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL
AND PCPEDC.DTINICIALCHECKOUT IS NOT NULL AND PCPEDC.DTINICIALCHECKOUT IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_conferencia_nao_iniciada"
AND PCPEDC.DTFINALCHECKOUT IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_em_conferencia" ,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL
,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL AND PCPEDC.DTINICIALCHECKOUT IS NOT NULL
AND PCPEDC.DTFINALCHECKOUT IS NOT NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_coferencia_finalizada" AND PCPEDC.DTFINALCHECKOUT IS NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_em_conferencia"
FROM PCPEDI, PCPEDC, PCPRODUT, PCCARREG ,SUM(CASE WHEN PCPEDC.DTFINALSEP IS NOT NULL
WHERE PCPEDI.NUMPED = PCPEDC.NUMPED AND PCPEDC.DTFINALCHECKOUT IS NOT NULL THEN PCPEDI.QT ELSE 0 END * PCPRODUT.PESOBRUTO) as "total_coferencia_finalizada"
AND PCPEDI.CODPROD = PCPRODUT.CODPROD FROM PCPEDI, PCPEDC, PCPRODUT, PCCARREG
AND PCPEDI.NUMCAR = PCCARREG.NUMCAR WHERE PCPEDI.NUMPED = PCPEDC.NUMPED
AND PCPEDC.CODFILIAL = 12 AND PCPEDI.CODPROD = PCPRODUT.CODPROD
AND PCPEDI.TIPOENTREGA IN ('EN', 'EF') AND PCPEDI.NUMCAR = PCCARREG.NUMCAR
AND PCCARREG.DTSAIDA = TRUNC(SYSDATE)`; AND PCPEDC.CODFILIAL = 12
AND PCPEDI.TIPOENTREGA IN ('EN', 'EF')
const mov = await queryRunner.manager.query(sqlWMS); AND PCCARREG.DTSAIDA = TRUNC(SYSDATE)`;
const hoje = new Date(); const mov = await queryRunner.manager.query(sqlWMS);
let amanha = new Date(hoje); const hoje = new Date();
amanha.setDate(hoje.getDate() + 1);
const amanhaString = amanha.toISOString().split('T')[0]; let amanha = new Date(hoje);
amanha = new Date(amanhaString); amanha.setDate(hoje.getDate() + 1);
const amanhaString = amanha.toISOString().split('T')[0];
console.log(amanha); amanha = new Date(amanhaString);
console.log(JSON.stringify(mov));
console.log(amanha);
const movFiltered = mov.filter((m) => m.data_saida.toISOString().split('T')[0] == amanha.toISOString().split('T')[0]); console.log(JSON.stringify(mov));
return movFiltered; const movFiltered = mov.filter(
} catch (e) { (m) =>
console.log(e); m.data_saida.toISOString().split('T')[0] ==
} finally { amanha.toISOString().split('T')[0],
await queryRunner.release(); );
await dataSource.destroy();
} return movFiltered;
} } catch (e) {
console.log(e);
async getDeliveries(placa: string) { } finally {
const dataSource = new DataSource(createOracleConfig(this.configService)); await queryRunner.release();
await dataSource.initialize(); await dataSource.destroy();
const queryRunner = dataSource.createQueryRunner(); }
await queryRunner.connect(); }
try {
async getDeliveries(placa: string) {
const sql = `SELECT PCCARREG.NUMCAR as "id" const dataSource = new DataSource(createOracleConfig(this.configService));
,PCCARREG.DTSAIDA as "createDate" await dataSource.initialize();
,PCCARREG.DESTINO as "comment" const queryRunner = dataSource.createQueryRunner();
,PCCARREG.TOTPESO as "weight" await queryRunner.connect();
,PCCARREG.NUMNOTAS as "invoices" try {
,( SELECT COUNT(DISTINCT NVL(PCCLIENTENDENT.CODPRACAENT, PCPEDC.CODPRACA)) const sql = `SELECT PCCARREG.NUMCAR as "id"
FROM PCPEDC, PCCLIENTENDENT ,PCCARREG.DTSAIDA as "createDate"
WHERE PCPEDC.NUMCAR = PCCARREG.NUMCAR ,PCCARREG.DESTINO as "comment"
AND PCPEDC.CODENDENTCLI = PCCLIENTENDENT.CODENDENTCLI (+) ) as "citys" ,PCCARREG.TOTPESO as "weight"
,( SELECT COUNT(DISTINCT PCPEDC.CODCLI) FROM PCPEDC ,PCCARREG.NUMNOTAS as "invoices"
WHERE PCPEDC.NUMCAR = PCCARREG.NUMCAR) as "deliveries" ,( SELECT COUNT(DISTINCT NVL(PCCLIENTENDENT.CODPRACAENT, PCPEDC.CODPRACA))
,PCCARREG.CODMOTORISTA as "driverId" FROM PCPEDC, PCCLIENTENDENT
,PCEMPR.NOME as "driverName" WHERE PCPEDC.NUMCAR = PCCARREG.NUMCAR
,PCVEICUL.CODVEICULO as "carId" AND PCPEDC.CODENDENTCLI = PCCLIENTENDENT.CODENDENTCLI (+) ) as "citys"
,PCVEICUL.DESCRICAO as "carDescription" ,( SELECT COUNT(DISTINCT PCPEDC.CODCLI) FROM PCPEDC
,PCVEICUL.PLACA as "identification" WHERE PCPEDC.NUMCAR = PCCARREG.NUMCAR) as "deliveries"
,PCCARREG.CODFUNCAJUD as "helperId" ,PCCARREG.CODMOTORISTA as "driverId"
,PCCARREG.CODFUNCAJUD2 as "helperId1" ,PCEMPR.NOME as "driverName"
,PCCARREG.CODFUNCAJUD3 as "helperId2" ,PCVEICUL.CODVEICULO as "carId"
FROM PCCARREG, PCVEICUL, PCEMPR ,PCVEICUL.DESCRICAO as "carDescription"
WHERE PCCARREG.CODVEICULO = PCVEICUL.codveiculo (+) ,PCVEICUL.PLACA as "identification"
AND PCCARREG.CODMOTORISTA = PCEMPR.MATRICULA (+) ,PCCARREG.CODFUNCAJUD as "helperId"
AND PCCARREG.DTFECHA IS NULL ,PCCARREG.CODFUNCAJUD2 as "helperId1"
AND PCCARREG.DTSAIDA >= TRUNC(SYSDATE)`; ,PCCARREG.CODFUNCAJUD3 as "helperId2"
FROM PCCARREG, PCVEICUL, PCEMPR
const deliveries = await queryRunner.manager.query(sql); WHERE PCCARREG.CODVEICULO = PCVEICUL.codveiculo (+)
AND PCCARREG.CODMOTORISTA = PCEMPR.MATRICULA (+)
return deliveries; AND PCCARREG.DTFECHA IS NULL
} catch (e) { AND PCCARREG.DTSAIDA >= TRUNC(SYSDATE)`;
console.log(e);
} finally { const deliveries = await queryRunner.manager.query(sql);
await queryRunner.release();
await dataSource.destroy(); return deliveries;
} } catch (e) {
console.log(e);
} } finally {
await queryRunner.release();
async getStatusCar(placa: string) { await dataSource.destroy();
const dataSource = new DataSource(createPostgresConfig(this.configService)); }
await dataSource.initialize(); }
const queryRunner = dataSource.createQueryRunner();
await queryRunner.connect(); async getStatusCar(placa: string) {
try { const dataSource = new DataSource(createPostgresConfig(this.configService));
await dataSource.initialize();
const sql = `SELECT ESTSAIDAVEICULO.CODSAIDA FROM ESTSAIDAVEICULO, PCVEICUL const queryRunner = dataSource.createQueryRunner();
WHERE ESTSAIDAVEICULO.CODVEICULO = PCVEICUL.CODVEICULO await queryRunner.connect();
AND PCVEICUL.PLACA = '${placa}' try {
AND ESTSAIDAVEICULO.DTRETORNO IS NULL`; const sql = `SELECT ESTSAIDAVEICULO.CODSAIDA FROM ESTSAIDAVEICULO, PCVEICUL
WHERE ESTSAIDAVEICULO.CODVEICULO = PCVEICUL.CODVEICULO
const outCar = await queryRunner.manager.query(sql); AND PCVEICUL.PLACA = '${placa}'
AND ESTSAIDAVEICULO.DTRETORNO IS NULL`;
return { veiculoEmViagem: ( outCar.length > 0 ) ? true : false };
const outCar = await queryRunner.manager.query(sql);
} catch (e) {
console.log(e); return { veiculoEmViagem: outCar.length > 0 ? true : false };
} finally { } catch (e) {
await queryRunner.release(); console.log(e);
await dataSource.destroy(); } finally {
} await queryRunner.release();
} await dataSource.destroy();
}
async getEmployee() { }
const dataSource = new DataSource(createOracleConfig(this.configService));
await dataSource.initialize(); async getEmployee() {
const queryRunner = dataSource.createQueryRunner(); const dataSource = new DataSource(createOracleConfig(this.configService));
await queryRunner.connect(); await dataSource.initialize();
try { const queryRunner = dataSource.createQueryRunner();
const sql = `SELECT PCEMPR.MATRICULA as "id" await queryRunner.connect();
,PCEMPR.NOME as "name" try {
,PCEMPR.FUNCAO as "fuctionName" const sql = `SELECT PCEMPR.MATRICULA as "id"
FROM PCEMPR, PCCONSUM ,PCEMPR.NOME as "name"
WHERE PCEMPR.DTDEMISSAO IS NULL ,PCEMPR.FUNCAO as "fuctionName"
AND PCEMPR.CODSETOR = PCCONSUM.CODSETOREXPED FROM PCEMPR, PCCONSUM
ORDER BY PCEMPR.NOME `; WHERE PCEMPR.DTDEMISSAO IS NULL
const dataEmployee = await queryRunner.query(sql); AND PCEMPR.CODSETOR = PCCONSUM.CODSETOREXPED
ORDER BY PCEMPR.NOME `;
return dataEmployee; const dataEmployee = await queryRunner.query(sql);
} finally {
await queryRunner.release(); return dataEmployee;
await dataSource.destroy(); } finally {
} await queryRunner.release();
} await dataSource.destroy();
}
async createCarOut(data: CarOutDelivery) { }
const dataSource = new DataSource(createPostgresConfig(this.configService)); async createCarOut(data: CarOutDelivery) {
await dataSource.initialize(); const dataSource = new DataSource(createPostgresConfig(this.configService));
const queryRunner = dataSource.createQueryRunner(); await dataSource.initialize();
await queryRunner.connect(); const queryRunner = dataSource.createQueryRunner();
await queryRunner.startTransaction(); await queryRunner.connect();
try { await queryRunner.startTransaction();
try {
const sqlSequence = `SELECT ESS_SAIDAVEICULO.NEXTVAL as "id" FROM DUAL`; const sqlSequence = `SELECT ESS_SAIDAVEICULO.NEXTVAL as "id" FROM DUAL`;
const dataSequence = await queryRunner.query(sqlSequence); const dataSequence = await queryRunner.query(sqlSequence);
let i = 0; let i = 0;
let helperId1 = 0; let helperId1 = 0;
let helperId2 = 0; let helperId2 = 0;
let helperId3 = 0; let helperId3 = 0;
const image1 = ''; const image1 = '';
const image2 = ''; const image2 = '';
const image3 = ''; const image3 = '';
const image4 = ''; const image4 = '';
data.helpers.forEach(helper => { data.helpers.forEach((helper) => {
switch (i) { switch (i) {
case 0: case 0:
helperId1 = helper.id; helperId1 = helper.id;
break; break;
case 1: case 1:
helperId2 = helper.id; helperId2 = helper.id;
break; break;
case 2: case 2:
helperId3 = helper.id; helperId3 = helper.id;
break; break;
} }
i++; i++;
}); });
for (let y = 0; y < data.photos.length; y++) { for (let y = 0; y < data.photos.length; y++) {
const sqlImage = `INSERT INTO ESTSAIDAVEICULOIMAGENS ( CODSAIDA, TIPO, URL ) const sqlImage = `INSERT INTO ESTSAIDAVEICULOIMAGENS ( CODSAIDA, TIPO, URL )
VALUES (${dataSequence[0].id}, 'SA', '${data.photos[y]}' )`; VALUES (${dataSequence[0].id}, 'SA', '${data.photos[y]}' )`;
await queryRunner.query(sqlImage); await queryRunner.query(sqlImage);
} }
const sqlSaidaVeiculo = `INSERT INTO ESTSAIDAVEICULO ( CODSAIDA, CODVEICULO, DTSAIDA, QTAJUDANTES, CODFUNCSAIDA ) const sqlSaidaVeiculo = `INSERT INTO ESTSAIDAVEICULO ( CODSAIDA, CODVEICULO, DTSAIDA, QTAJUDANTES, CODFUNCSAIDA )
VALUES ( ${dataSequence[0].id}, ${data.vehicleCode}, SYSDATE, ${data.helpers.length}, VALUES ( ${dataSequence[0].id}, ${data.vehicleCode}, SYSDATE, ${data.helpers.length},
${data.userCode} )`; ${data.userCode} )`;
await queryRunner.query(sqlSaidaVeiculo); await queryRunner.query(sqlSaidaVeiculo);
for (let y = 0; y < data.numberLoading.length; y++) { for (let y = 0; y < data.numberLoading.length; y++) {
const sqlLoading = `INSERT INTO ESTSAIDAVEICULOCARREG ( CODSAIDA, NUMCAR ) const sqlLoading = `INSERT INTO ESTSAIDAVEICULOCARREG ( CODSAIDA, NUMCAR )
VALUES ( ${dataSequence[0].id}, ${data.numberLoading[y]})`; VALUES ( ${dataSequence[0].id}, ${data.numberLoading[y]})`;
await queryRunner.query(sqlLoading); await queryRunner.query(sqlLoading);
const sql = `UPDATE PCCARREG SET const sql = `UPDATE PCCARREG SET
DTSAIDAVEICULO = SYSDATE DTSAIDAVEICULO = SYSDATE
,CODFUNCAJUD = ${helperId1} ,CODFUNCAJUD = ${helperId1}
,CODFUNCAJUD2 = ${helperId2} ,CODFUNCAJUD2 = ${helperId2}
,CODFUNCAJUD3 = ${helperId3} ,CODFUNCAJUD3 = ${helperId3}
,KMINICIAL = ${data.startKm} ,KMINICIAL = ${data.startKm}
WHERE NUMCAR = ${data.numberLoading[y]}`; WHERE NUMCAR = ${data.numberLoading[y]}`;
await queryRunner.query(sql); await queryRunner.query(sql);
}
}
await queryRunner.commitTransaction();
await queryRunner.commitTransaction();
return { message: 'Dados da saída de veículo gravada com sucesso!' };
return { message: 'Dados da saída de veículo gravada com sucesso!'} } catch (e) {
await queryRunner.rollbackTransaction();
} catch (e) { throw e;
await queryRunner.rollbackTransaction(); } finally {
throw e; await queryRunner.release();
} finally { await dataSource.destroy();
await queryRunner.release(); }
await dataSource.destroy(); }
}
} async createCarIn(data: CarInDelivery) {
const dataSource = new DataSource(createPostgresConfig(this.configService));
async createCarIn(data: CarInDelivery) { await dataSource.initialize();
const queryRunner = dataSource.createQueryRunner();
const dataSource = new DataSource(createPostgresConfig(this.configService)); await queryRunner.connect();
await dataSource.initialize(); await queryRunner.startTransaction();
const queryRunner = dataSource.createQueryRunner(); try {
await queryRunner.connect(); const sqlOutCar = `SELECT ESTSAIDAVEICULO.CODSAIDA as "id"
await queryRunner.startTransaction(); FROM PCCARREG, PCVEICUL, ESTSAIDAVEICULO, ESTSAIDAVEICULOCARREG
try { WHERE PCCARREG.CODVEICULO = PCVEICUL.CODVEICULO
AND PCCARREG.NUMCAR = ESTSAIDAVEICULOCARREG.NUMCAR
const sqlOutCar = `SELECT ESTSAIDAVEICULO.CODSAIDA as "id" AND ESTSAIDAVEICULOCARREG.CODSAIDA = ESTSAIDAVEICULO.CODSAIDA
FROM PCCARREG, PCVEICUL, ESTSAIDAVEICULO, ESTSAIDAVEICULOCARREG -- AND ESTSAIDAVEICULO.DTRETORNO IS NULL
WHERE PCCARREG.CODVEICULO = PCVEICUL.CODVEICULO AND PCVEICUL.PLACA = '${data.licensePlate}'`;
AND PCCARREG.NUMCAR = ESTSAIDAVEICULOCARREG.NUMCAR const dataOutCar = await queryRunner.query(sqlOutCar);
AND ESTSAIDAVEICULOCARREG.CODSAIDA = ESTSAIDAVEICULO.CODSAIDA
-- AND ESTSAIDAVEICULO.DTRETORNO IS NULL if (dataOutCar.length == 0) {
AND PCVEICUL.PLACA = '${data.licensePlate}'`; throw new HttpException(
const dataOutCar = await queryRunner.query(sqlOutCar); 'Não foi localiza viagens em aberto para este veículo.',
HttpStatus.BAD_REQUEST,
if ( dataOutCar.length == 0 ) { );
throw new HttpException('Não foi localiza viagens em aberto para este veículo.', HttpStatus.BAD_REQUEST ); }
}
const i = 0;
const i = 0; const image1 = '';
const image1 = ''; const image2 = '';
const image2 = ''; const image3 = '';
const image3 = ''; const image4 = '';
const image4 = '';
for (let y = 0; y < data.invoices.length; y++) {
for (let y = 0; y < data.invoices.length; y++) { const invoice = data.invoices[y];
const invoice = data.invoices[y]; const sqlInvoice = `INSERT INTO ESTRETORNONF ( CODSAIDA, NUMCAR, NUMNOTA, SITUACAO, MOTIVO )
const sqlInvoice = `INSERT INTO ESTRETORNONF ( CODSAIDA, NUMCAR, NUMNOTA, SITUACAO, MOTIVO ) VALUES ( ${dataOutCar[0].id}, ${invoice.loadingNumber}, ${invoice.invoiceNumber},
VALUES ( ${dataOutCar[0].id}, ${invoice.loadingNumber}, ${invoice.invoiceNumber}, '${invoice.status}', '${invoice.reasonText}')`;
'${invoice.status}', '${invoice.reasonText}')`; await queryRunner.query(sqlInvoice);
await queryRunner.query(sqlInvoice); }
}
const updateCarreg = `UPDATE PCCARREG SET
const updateCarreg = `UPDATE PCCARREG SET PCCARREG.DTRETORNO = SYSDATE
PCCARREG.DTRETORNO = SYSDATE ,PCCARREG.KMFINAL = ${data.finalKm}
,PCCARREG.KMFINAL = ${data.finalKm} WHERE PCCARREG.NUMCAR IN ( SELECT SC.NUMCAR
WHERE PCCARREG.NUMCAR IN ( SELECT SC.NUMCAR FROM ESTSAIDAVEICULOCARREG SC
FROM ESTSAIDAVEICULOCARREG SC WHERE SC.CODSAIDA = ${dataOutCar[0].id} )`;
WHERE SC.CODSAIDA = ${dataOutCar[0].id} )`; await queryRunner.query(updateCarreg);
await queryRunner.query(updateCarreg);
for (let i = 0; i < data.images.length; i++) {
for (let i = 0; i < data.images.length; i++) { const sqlImage = `INSERT INTO ESTSAIDAVEICULOIMAGENS ( CODSAIDA, TIPO, URL )
const sqlImage = `INSERT INTO ESTSAIDAVEICULOIMAGENS ( CODSAIDA, TIPO, URL ) VALUES (${dataOutCar[0].id}, 'RE', '${data.images[i]}' )`;
VALUES (${dataOutCar[0].id}, 'RE', '${data.images[i]}' )`; await queryRunner.query(sqlImage);
await queryRunner.query(sqlImage); }
}
const sqlInCar = `UPDATE ESTSAIDAVEICULO SET
const sqlInCar = `UPDATE ESTSAIDAVEICULO SET ESTSAIDAVEICULO.DTRETORNO = SYSDATE
ESTSAIDAVEICULO.DTRETORNO = SYSDATE ,ESTSAIDAVEICULO.QTPALETES_PBR = ${data.qtdPaletesPbr}
,ESTSAIDAVEICULO.QTPALETES_PBR = ${data.qtdPaletesPbr} ,ESTSAIDAVEICULO.QTPALETES_CIM = ${data.qtdPaletesCim}
,ESTSAIDAVEICULO.QTPALETES_CIM = ${data.qtdPaletesCim} ,ESTSAIDAVEICULO.QTPALETES_DES = ${data.qtdPaletesDes}
,ESTSAIDAVEICULO.QTPALETES_DES = ${data.qtdPaletesDes} ,ESTSAIDAVEICULO.codfuncretorno = ${data.userId}
,ESTSAIDAVEICULO.codfuncretorno = ${data.userId} ,ESTSAIDAVEICULO.obsretorno = '${data.observation}'
,ESTSAIDAVEICULO.obsretorno = '${data.observation}' ,ESTSAIDAVEICULO.HOUVESOBRA = '${data.remnant}'
,ESTSAIDAVEICULO.HOUVESOBRA = '${data.remnant}' ,ESTSAIDAVEICULO.OBSSOBRA = '${data.observationRemnant}'
,ESTSAIDAVEICULO.OBSSOBRA = '${data.observationRemnant}' WHERE ESTSAIDAVEICULO.CODSAIDA = ${dataOutCar[0].id}`;
WHERE ESTSAIDAVEICULO.CODSAIDA = ${dataOutCar[0].id}`;
await queryRunner.query(sqlInCar);
await queryRunner.query(sqlInCar); for (let i = 0; i < data.imagesRemnant.length; i++) {
for (let i = 0; i < data.imagesRemnant.length; i++) { const sqlImage = `INSERT INTO ESTSAIDAVEICULOIMAGENS ( CODSAIDA, TIPO, URL )
const sqlImage = `INSERT INTO ESTSAIDAVEICULOIMAGENS ( CODSAIDA, TIPO, URL ) VALUES (${dataOutCar[0].id}, 'SO', '${data.imagesRemnant[i]}' )`;
VALUES (${dataOutCar[0].id}, 'SO', '${data.imagesRemnant[i]}' )`; await queryRunner.query(sqlImage);
await queryRunner.query(sqlImage); }
}
await queryRunner.commitTransaction();
await queryRunner.commitTransaction();
return { message: 'Dados de retorno do veículo gravada com sucesso!' };
return { message: 'Dados de retorno do veículo gravada com sucesso!'} } catch (e) {
await queryRunner.rollbackTransaction();
} catch (e) { console.log(e);
await queryRunner.rollbackTransaction(); throw e;
console.log(e); } finally {
throw e; await queryRunner.release();
} finally { await dataSource.destroy();
await queryRunner.release(); }
await dataSource.destroy(); }
} }
}
}

View File

@@ -12,30 +12,37 @@ async function bootstrap() {
* Configura timezone para horário brasileiro * Configura timezone para horário brasileiro
*/ */
process.env.TZ = 'America/Sao_Paulo'; process.env.TZ = 'America/Sao_Paulo';
const app = await NestFactory.create<NestExpressApplication>(AppModule); const app = await NestFactory.create<NestExpressApplication>(AppModule);
app.use(helmet({ app.use(
contentSecurityPolicy: { helmet({
directives: { contentSecurityPolicy: {
defaultSrc: [`'self'`], directives: {
scriptSrc: [`'self'`, `'unsafe-inline'`, 'cdn.jsdelivr.net', 'cdnjs.cloudflare.com'], defaultSrc: [`'self'`],
styleSrc: [`'self'`, `'unsafe-inline'`, 'cdnjs.cloudflare.com'], scriptSrc: [
imgSrc: [`'self'`, 'data:'], `'self'`,
connectSrc: [`'self'`], `'unsafe-inline'`,
fontSrc: [`'self'`, 'cdnjs.cloudflare.com'], 'cdn.jsdelivr.net',
'cdnjs.cloudflare.com',
],
styleSrc: [`'self'`, `'unsafe-inline'`, 'cdnjs.cloudflare.com'],
imgSrc: [`'self'`, 'data:'],
connectSrc: [`'self'`],
fontSrc: [`'self'`, 'cdnjs.cloudflare.com'],
},
}, },
}, }),
})); );
// Configurar pasta de arquivos estáticos // Configurar pasta de arquivos estáticos
app.useStaticAssets(join(__dirname, '..', 'public'), { app.useStaticAssets(join(__dirname, '..', 'public'), {
index: false, index: false,
prefix: '/dashboard', prefix: '/dashboard',
}); });
app.useGlobalInterceptors(new ResponseInterceptor()); app.useGlobalInterceptors(new ResponseInterceptor());
app.useGlobalPipes( app.useGlobalPipes(
new ValidationPipe({ new ValidationPipe({
whitelist: true, whitelist: true,
@@ -56,19 +63,16 @@ async function bootstrap() {
allowedHeaders: ['Content-Type', 'Authorization', 'Accept'], allowedHeaders: ['Content-Type', 'Authorization', 'Accept'],
}); });
const config = new DocumentBuilder() const config = new DocumentBuilder()
.setTitle('Portal Jurunense API') .setTitle('Portal Jurunense API')
.setDescription('Documentação da API do Portal Jurunense') .setDescription('Documentação da API do Portal Jurunense')
.setVersion('1.0') .setVersion('1.0')
.addBearerAuth() .addBearerAuth()
.build(); .build();
const document = SwaggerModule.createDocument(app, config); const document = SwaggerModule.createDocument(app, config);
SwaggerModule.setup('docs', app, document); SwaggerModule.setup('docs', app, document);
await app.listen(8066); await app.listen(8066);
} }
bootstrap(); bootstrap();

View File

@@ -14,4 +14,4 @@ export class CreateInvoiceDto {
required: true, required: true,
}) })
userId: number; userId: number;
} }

View File

@@ -38,7 +38,7 @@ export class CreatePaymentDto {
@ApiProperty({ @ApiProperty({
description: 'Valor do pagamento', description: 'Valor do pagamento',
example: 1000.00, example: 1000.0,
required: true, required: true,
}) })
amount: number; amount: number;
@@ -63,4 +63,4 @@ export class CreatePaymentDto {
required: true, required: true,
}) })
userId: number; userId: number;
} }

View File

@@ -69,7 +69,7 @@ export class OrderDto {
@ApiProperty({ @ApiProperty({
description: 'Valor total do pedido', description: 'Valor total do pedido',
example: 1000.00, example: 1000.0,
}) })
amount: number; amount: number;
@@ -81,11 +81,11 @@ export class OrderDto {
@ApiProperty({ @ApiProperty({
description: 'Valor total pago', description: 'Valor total pago',
example: 1000.00, example: 1000.0,
}) })
amountPaid: number; amountPaid: number;
constructor(partial: Partial<OrderDto>) { constructor(partial: Partial<OrderDto>) {
Object.assign(this, partial); Object.assign(this, partial);
} }
} }

View File

@@ -39,7 +39,7 @@ export class PaymentDto {
@ApiProperty({ @ApiProperty({
description: 'Valor do pagamento', description: 'Valor do pagamento',
example: 1000.00, example: 1000.0,
}) })
amount: number; amount: number;
@@ -64,4 +64,4 @@ export class PaymentDto {
constructor(partial: Partial<PaymentDto>) { constructor(partial: Partial<PaymentDto>) {
Object.assign(this, partial); Object.assign(this, partial);
} }
} }

View File

@@ -1,77 +1,82 @@
import { Body, Controller, Get, Param, Post, UseGuards } from '@nestjs/common'; import { Body, Controller, Get, Param, Post, UseGuards } from '@nestjs/common';
import { ApiTags, ApiOperation, ApiParam, ApiResponse, ApiBearerAuth } from '@nestjs/swagger'; import {
import { OrdersPaymentService } from './orders-payment.service'; ApiTags,
import { OrderDto } from './dto/order.dto'; ApiOperation,
import { PaymentDto } from './dto/payment.dto'; ApiParam,
import { CreatePaymentDto } from './dto/create-payment.dto'; ApiResponse,
import { CreateInvoiceDto } from './dto/create-invoice.dto'; ApiBearerAuth,
import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard'; } from '@nestjs/swagger';
import { OrdersPaymentService } from './orders-payment.service';
@ApiTags('Orders Payment') import { OrderDto } from './dto/order.dto';
@ApiBearerAuth() import { PaymentDto } from './dto/payment.dto';
@UseGuards(JwtAuthGuard) import { CreatePaymentDto } from './dto/create-payment.dto';
@Controller('api/v1/orders-payment') import { CreateInvoiceDto } from './dto/create-invoice.dto';
export class OrdersPaymentController { import { JwtAuthGuard } from 'src/auth/guards/jwt-auth.guard';
constructor(private readonly orderPaymentService: OrdersPaymentService){} @ApiTags('Orders Payment')
@ApiBearerAuth()
@Get('orders/:id') @UseGuards(JwtAuthGuard)
@ApiOperation({ summary: 'Lista todos os pedidos de uma loja' }) @Controller('api/v1/orders-payment')
@ApiParam({ name: 'id', description: 'ID da loja' }) export class OrdersPaymentController {
@ApiResponse({ constructor(private readonly orderPaymentService: OrdersPaymentService) {}
status: 200,
description: 'Lista de pedidos retornada com sucesso', @Get('orders/:id')
type: [OrderDto] @ApiOperation({ summary: 'Lista todos os pedidos de uma loja' })
}) @ApiParam({ name: 'id', description: 'ID da loja' })
async findOrders(@Param('id') storeId: string): Promise<OrderDto[]> { @ApiResponse({
return this.orderPaymentService.findOrders(storeId, 0); status: 200,
} description: 'Lista de pedidos retornada com sucesso',
type: [OrderDto],
@Get('orders/:id/:orderId') })
@ApiOperation({ summary: 'Busca um pedido específico' }) async findOrders(@Param('id') storeId: string): Promise<OrderDto[]> {
@ApiParam({ name: 'id', description: 'ID da loja' }) return this.orderPaymentService.findOrders(storeId, 0);
@ApiParam({ name: 'orderId', description: 'ID do pedido' }) }
@ApiResponse({
status: 200, @Get('orders/:id/:orderId')
description: 'Pedido retornado com sucesso', @ApiOperation({ summary: 'Busca um pedido específico' })
type: OrderDto @ApiParam({ name: 'id', description: 'ID da loja' })
}) @ApiParam({ name: 'orderId', description: 'ID do pedido' })
async findOrder( @ApiResponse({
@Param('id') storeId: string, status: 200,
@Param('orderId') orderId: number, description: 'Pedido retornado com sucesso',
): Promise<OrderDto> { type: OrderDto,
const orders = await this.orderPaymentService.findOrders(storeId, orderId); })
return orders[0]; async findOrder(
} @Param('id') storeId: string,
@Param('orderId') orderId: number,
@Get('payments/:id') ): Promise<OrderDto> {
@ApiOperation({ summary: 'Lista todos os pagamentos de um pedido' }) const orders = await this.orderPaymentService.findOrders(storeId, orderId);
@ApiParam({ name: 'id', description: 'ID do pedido' }) return orders[0];
@ApiResponse({ }
status: 200,
description: 'Lista de pagamentos retornada com sucesso', @Get('payments/:id')
type: [PaymentDto] @ApiOperation({ summary: 'Lista todos os pagamentos de um pedido' })
}) @ApiParam({ name: 'id', description: 'ID do pedido' })
async findPayments(@Param('id') orderId: number): Promise<PaymentDto[]> { @ApiResponse({
return this.orderPaymentService.findPayments(orderId); status: 200,
} description: 'Lista de pagamentos retornada com sucesso',
@Post('payments/create') type: [PaymentDto],
@ApiOperation({ summary: 'Cria um novo pagamento' }) })
@ApiResponse({ async findPayments(@Param('id') orderId: number): Promise<PaymentDto[]> {
status: 201, return this.orderPaymentService.findPayments(orderId);
description: 'Pagamento criado com sucesso' }
}) @Post('payments/create')
async createPayment(@Body() data: CreatePaymentDto): Promise<void> { @ApiOperation({ summary: 'Cria um novo pagamento' })
return this.orderPaymentService.createPayment(data); @ApiResponse({
} status: 201,
description: 'Pagamento criado com sucesso',
@Post('invoice/create') })
@ApiOperation({ summary: 'Cria uma nova fatura' }) async createPayment(@Body() data: CreatePaymentDto): Promise<void> {
@ApiResponse({ return this.orderPaymentService.createPayment(data);
status: 201, }
description: 'Fatura criada com sucesso'
}) @Post('invoice/create')
async createInvoice(@Body() data: CreateInvoiceDto): Promise<void> { @ApiOperation({ summary: 'Cria uma nova fatura' })
return this.orderPaymentService.createInvoice(data); @ApiResponse({
} status: 201,
} description: 'Fatura criada com sucesso',
})
async createInvoice(@Body() data: CreateInvoiceDto): Promise<void> {
return this.orderPaymentService.createInvoice(data);
}
}

View File

@@ -1,5 +1,5 @@
/* eslint-disable prettier/prettier */ /* eslint-disable prettier/prettier */
/* eslint-disable @typescript-eslint/no-unused-vars */
/* /*
https://docs.nestjs.com/modules https://docs.nestjs.com/modules

View File

@@ -1,120 +1,120 @@
import { Injectable, Inject } from '@nestjs/common'; import { Injectable, Inject } from '@nestjs/common';
import { DataSource } from 'typeorm'; import { DataSource } from 'typeorm';
import { ConfigService } from '@nestjs/config'; import { ConfigService } from '@nestjs/config';
import { DATA_SOURCE } from '../core/constants'; import { DATA_SOURCE } from '../core/constants';
import { OrderDto } from './dto/order.dto'; import { OrderDto } from './dto/order.dto';
import { PaymentDto } from './dto/payment.dto'; import { PaymentDto } from './dto/payment.dto';
import { CreatePaymentDto } from './dto/create-payment.dto'; import { CreatePaymentDto } from './dto/create-payment.dto';
import { CreateInvoiceDto } from './dto/create-invoice.dto'; import { CreateInvoiceDto } from './dto/create-invoice.dto';
@Injectable() @Injectable()
export class OrdersPaymentService { export class OrdersPaymentService {
constructor( constructor(
private readonly configService: ConfigService, private readonly configService: ConfigService,
@Inject(DATA_SOURCE) private readonly dataSource: DataSource @Inject(DATA_SOURCE) private readonly dataSource: DataSource,
) {} ) {}
async findOrders(storeId: string, orderId: number): Promise<OrderDto[]> { async findOrders(storeId: string, orderId: number): Promise<OrderDto[]> {
const queryRunner = this.dataSource.createQueryRunner(); const queryRunner = this.dataSource.createQueryRunner();
await queryRunner.connect(); await queryRunner.connect();
try { try {
const sql = `SELECT PCPEDC.DATA as "createDate" const sql = `SELECT PCPEDC.DATA as "createDate"
,PCPEDC.CODFILIAL as "storeId" ,PCPEDC.CODFILIAL as "storeId"
,PCPEDC.NUMPED as "orderId" ,PCPEDC.NUMPED as "orderId"
,PCPEDC.CODCLI as "customerId" ,PCPEDC.CODCLI as "customerId"
,PCCLIENT.CLIENTE as "customerName" ,PCCLIENT.CLIENTE as "customerName"
,PCPEDC.CODUSUR as "sellerId" ,PCPEDC.CODUSUR as "sellerId"
,PCUSUARI.NOME as "sellerName" ,PCUSUARI.NOME as "sellerName"
,PCPEDC.CODCOB as "billingId" ,PCPEDC.CODCOB as "billingId"
,PCCOB.COBRANCA as "billingName" ,PCCOB.COBRANCA as "billingName"
,PCPEDC.CODPLPAG as "planId" ,PCPEDC.CODPLPAG as "planId"
,PCPLPAG.DESCRICAO as "planName" ,PCPLPAG.DESCRICAO as "planName"
,ROUND(PCPEDC.VLATEND,2) as "amount" ,ROUND(PCPEDC.VLATEND,2) as "amount"
,NVL(PCPLPAG.NUMPARCELAS,1) as "installments" ,NVL(PCPLPAG.NUMPARCELAS,1) as "installments"
,( SELECT SUM(ESTPAGAMENTO.VALOR) FROM ESTPAGAMENTO ,( SELECT SUM(ESTPAGAMENTO.VALOR) FROM ESTPAGAMENTO
WHERE ESTPAGAMENTO.NUMORCA = PCPEDC.NUMPED ) as "amountPaid" WHERE ESTPAGAMENTO.NUMORCA = PCPEDC.NUMPED ) as "amountPaid"
FROM PCPEDC, PCCLIENT, PCUSUARI, PCCOB, PCPLPAG FROM PCPEDC, PCCLIENT, PCUSUARI, PCCOB, PCPLPAG
WHERE PCPEDC.CODCLI = PCCLIENT.CODCLI WHERE PCPEDC.CODCLI = PCCLIENT.CODCLI
AND PCPEDC.CODUSUR = PCUSUARI.CODUSUR AND PCPEDC.CODUSUR = PCUSUARI.CODUSUR
AND PCPEDC.CODPLPAG = PCPLPAG.CODPLPAG AND PCPEDC.CODPLPAG = PCPLPAG.CODPLPAG
AND PCPEDC.CODCOB = PCCOB.CODCOB AND PCPEDC.CODCOB = PCCOB.CODCOB
AND PCPEDC.CONDVENDA = 7 AND PCPEDC.CONDVENDA = 7
AND PCPEDC.POSICAO IN ('L') AND PCPEDC.POSICAO IN ('L')
AND PCPEDC.DATA >= TRUNC(SYSDATE) - 5 AND PCPEDC.DATA >= TRUNC(SYSDATE) - 5
AND PCPEDC.CODFILIAL = ${storeId} `; AND PCPEDC.CODFILIAL = ${storeId} `;
let sqlWhere = ''; let sqlWhere = '';
if (orderId > 0) { if (orderId > 0) {
sqlWhere += ` AND PCPEDC.NUMPED = ${orderId}`; sqlWhere += ` AND PCPEDC.NUMPED = ${orderId}`;
} }
const orders = await queryRunner.manager.query(sql + sqlWhere); const orders = await queryRunner.manager.query(sql + sqlWhere);
return orders.map(order => new OrderDto(order)); return orders.map((order) => new OrderDto(order));
} finally { } finally {
await queryRunner.release(); await queryRunner.release();
} }
} }
async findPayments(orderId: number): Promise<PaymentDto[]> { async findPayments(orderId: number): Promise<PaymentDto[]> {
const queryRunner = this.dataSource.createQueryRunner(); const queryRunner = this.dataSource.createQueryRunner();
await queryRunner.connect(); await queryRunner.connect();
try { try {
const sql = `SELECT const sql = `SELECT
ESTPAGAMENTO.NUMORCA as "orderId" ESTPAGAMENTO.NUMORCA as "orderId"
,ESTPAGAMENTO.DTPAGAMENTO as "payDate" ,ESTPAGAMENTO.DTPAGAMENTO as "payDate"
,ESTPAGAMENTO.CARTAO as "card" ,ESTPAGAMENTO.CARTAO as "card"
,ESTPAGAMENTO.PARCELAS as "installments" ,ESTPAGAMENTO.PARCELAS as "installments"
,ESTPAGAMENTO.NOMEBANDEIRA as "flagName" ,ESTPAGAMENTO.NOMEBANDEIRA as "flagName"
,ESTPAGAMENTO.FORMAPAGTO as "type" ,ESTPAGAMENTO.FORMAPAGTO as "type"
,ESTPAGAMENTO.VALOR as "amount" ,ESTPAGAMENTO.VALOR as "amount"
,ESTPAGAMENTO.CODFUNC as "userId" ,ESTPAGAMENTO.CODFUNC as "userId"
,ESTPAGAMENTO.NSU as "nsu" ,ESTPAGAMENTO.NSU as "nsu"
,ESTPAGAMENTO.CODAUTORIZACAO as "auth" ,ESTPAGAMENTO.CODAUTORIZACAO as "auth"
FROM ESTPAGAMENTO FROM ESTPAGAMENTO
WHERE ESTPAGAMENTO.NUMORCA = ${orderId}`; WHERE ESTPAGAMENTO.NUMORCA = ${orderId}`;
const payments = await queryRunner.manager.query(sql); const payments = await queryRunner.manager.query(sql);
return payments.map(payment => new PaymentDto(payment)); return payments.map((payment) => new PaymentDto(payment));
} finally { } finally {
await queryRunner.release(); await queryRunner.release();
} }
} }
async createPayment(payment: CreatePaymentDto): Promise<void> { async createPayment(payment: CreatePaymentDto): Promise<void> {
const queryRunner = this.dataSource.createQueryRunner(); const queryRunner = this.dataSource.createQueryRunner();
await queryRunner.connect(); await queryRunner.connect();
await queryRunner.startTransaction(); await queryRunner.startTransaction();
try { try {
const sql = `INSERT INTO ESTPAGAMENTO ( NUMORCA, DTPAGAMENTO, CARTAO, CODAUTORIZACAO, CODRESPOSTA, DTREQUISICAO, DTSERVIDOR, IDTRANSACAO, const sql = `INSERT INTO ESTPAGAMENTO ( NUMORCA, DTPAGAMENTO, CARTAO, CODAUTORIZACAO, CODRESPOSTA, DTREQUISICAO, DTSERVIDOR, IDTRANSACAO,
NSU, PARCELAS, VALOR, NOMEBANDEIRA, FORMAPAGTO, DTPROCESSAMENTO, CODFUNC ) NSU, PARCELAS, VALOR, NOMEBANDEIRA, FORMAPAGTO, DTPROCESSAMENTO, CODFUNC )
VALUES ( ${payment.orderId}, TRUNC(SYSDATE), '${payment.card}', '${payment.auth}', '00', SYSDATE, SYSDATE, NULL, VALUES ( ${payment.orderId}, TRUNC(SYSDATE), '${payment.card}', '${payment.auth}', '00', SYSDATE, SYSDATE, NULL,
'${payment.nsu}', ${payment.installments}, ${payment.amount}, '${payment.flagName}', '${payment.nsu}', ${payment.installments}, ${payment.amount}, '${payment.flagName}',
'${payment.paymentType}', SYSDATE, ${payment.userId} ) `; '${payment.paymentType}', SYSDATE, ${payment.userId} ) `;
await queryRunner.manager.query(sql); await queryRunner.manager.query(sql);
await queryRunner.commitTransaction(); await queryRunner.commitTransaction();
} catch (error) { } catch (error) {
await queryRunner.rollbackTransaction(); await queryRunner.rollbackTransaction();
throw error; throw error;
} finally { } finally {
await queryRunner.release(); await queryRunner.release();
} }
} }
async createInvoice(data: CreateInvoiceDto): Promise<void> { async createInvoice(data: CreateInvoiceDto): Promise<void> {
const queryRunner = this.dataSource.createQueryRunner(); const queryRunner = this.dataSource.createQueryRunner();
await queryRunner.connect(); await queryRunner.connect();
await queryRunner.startTransaction(); await queryRunner.startTransaction();
try { try {
const sql = `BEGIN const sql = `BEGIN
ESK_FATURAMENTO.FATURAMENTO_VENDA_ASSISTIDA(${data.orderId}, ${data.userId}); ESK_FATURAMENTO.FATURAMENTO_VENDA_ASSISTIDA(${data.orderId}, ${data.userId});
END;`; END;`;
await queryRunner.manager.query(sql); await queryRunner.manager.query(sql);
await queryRunner.commitTransaction(); await queryRunner.commitTransaction();
} catch (error) { } catch (error) {
await queryRunner.rollbackTransaction(); await queryRunner.rollbackTransaction();
throw error; throw error;
} finally { } finally {
await queryRunner.release(); await queryRunner.release();
} }
} }
} }

View File

@@ -0,0 +1,40 @@
import { Test, TestingModule } from '@nestjs/testing';
import { DebService } from '../deb.service';
import { DebRepository } from '../../repositories/deb.repository';
export const createMockRepository = (
methods: Partial<DebRepository> = {},
) =>
({
findByCpfCgcent: jest.fn(),
...methods,
} as any);
/**
 * Bundle returned by `createDebServiceTestModule`: the service under test
 * plus its injected mock, so each spec can stub and assert on the repository.
 */
export interface DebServiceTestContext {
  // Fully wired DebService instance resolved from the Nest testing module.
  service: DebService;
  // Jest-mocked repository that was injected into the service.
  mockRepository: jest.Mocked<DebRepository>;
}
/**
 * Compiles a Nest testing module that provides DebService backed by a
 * jest-mocked DebRepository.
 *
 * @param repositoryMethods optional per-test overrides for the repository mock
 * @returns the resolved service together with its mock repository
 */
export async function createDebServiceTestModule(
  repositoryMethods: Partial<DebRepository> = {},
): Promise<DebServiceTestContext> {
  const mockRepository = createMockRepository(repositoryMethods);

  // Register the real service, but substitute the mock wherever the
  // repository token would normally be injected.
  const testingModule: TestingModule = await Test.createTestingModule({
    providers: [
      DebService,
      { provide: DebRepository, useValue: mockRepository },
    ],
  }).compile();

  return {
    service: testingModule.get<DebService>(DebService),
    mockRepository,
  };
}

View File

@@ -0,0 +1,191 @@
import { createDebServiceTestModule } from './deb.service.spec.helper';
import { DebDto } from '../../dto/DebDto';
describe('DebService', () => {
  describe('findByCpfCgcent', () => {
    let context: Awaited<ReturnType<typeof createDebServiceTestModule>>;

    /**
     * Builds a DebDto with representative defaults; override only the fields
     * a test cares about. Removes the four copy-pasted 12-field literals the
     * suite previously carried.
     */
    const makeDeb = (overrides: Partial<DebDto> = {}): DebDto => ({
      dtemissao: new Date('2024-01-15'),
      codfilial: '1',
      duplic: '12345',
      prest: '1',
      codcli: 1000,
      cliente: 'JOÃO DA SILVA',
      codcob: 'BL',
      cobranca: 'BOLETO',
      dtvenc: new Date('2024-02-15'),
      dtpag: null,
      valor: 150.5,
      situacao: 'A VENCER',
      ...overrides,
    });

    beforeEach(async () => {
      context = await createDebServiceTestModule();
    });

    afterEach(() => {
      jest.clearAllMocks();
    });

    it('deve buscar débitos por CPF/CGCENT com sucesso', async () => {
      const mockDebs: DebDto[] = [
        makeDeb(),
        makeDeb({
          dtemissao: new Date('2024-01-20'),
          duplic: '12346',
          prest: '2',
          dtvenc: new Date('2024-02-20'),
          dtpag: new Date('2024-02-10'),
          valor: 200.0,
          situacao: 'PAGO',
        }),
      ];

      context.mockRepository.findByCpfCgcent.mockResolvedValue(mockDebs);

      const result = await context.service.findByCpfCgcent('12345678900');

      expect(result).toHaveLength(2);
      expect(result[0].codcli).toBe(1000);
      expect(result[0].cliente).toBe('JOÃO DA SILVA');
      expect(result[0].situacao).toBe('A VENCER');
      expect(result[1].situacao).toBe('PAGO');
      expect(context.mockRepository.findByCpfCgcent).toHaveBeenCalledWith(
        '12345678900',
        undefined,
        undefined,
      );
    });

    it('deve buscar débitos com matricula informada', async () => {
      context.mockRepository.findByCpfCgcent.mockResolvedValue([makeDeb()]);

      const result = await context.service.findByCpfCgcent(
        '12345678900',
        1498,
      );

      expect(result).toHaveLength(1);
      expect(context.mockRepository.findByCpfCgcent).toHaveBeenCalledWith(
        '12345678900',
        1498,
        undefined,
      );
    });

    it('deve buscar débitos com cobranca informada', async () => {
      context.mockRepository.findByCpfCgcent.mockResolvedValue([makeDeb()]);

      const result = await context.service.findByCpfCgcent(
        '12345678900',
        undefined,
        'BL',
      );

      expect(result).toHaveLength(1);
      expect(context.mockRepository.findByCpfCgcent).toHaveBeenCalledWith(
        '12345678900',
        undefined,
        'BL',
      );
    });

    it('deve buscar débitos com matricula e cobranca informadas', async () => {
      context.mockRepository.findByCpfCgcent.mockResolvedValue([makeDeb()]);

      const result = await context.service.findByCpfCgcent(
        '12345678900',
        1498,
        'BL',
      );

      expect(result).toHaveLength(1);
      expect(context.mockRepository.findByCpfCgcent).toHaveBeenCalledWith(
        '12345678900',
        1498,
        'BL',
      );
    });

    it('deve retornar array vazio quando nenhum débito é encontrado', async () => {
      context.mockRepository.findByCpfCgcent.mockResolvedValue([]);

      const result = await context.service.findByCpfCgcent('99999999999');

      expect(result).toHaveLength(0);
      expect(Array.isArray(result)).toBe(true);
      expect(context.mockRepository.findByCpfCgcent).toHaveBeenCalledWith(
        '99999999999',
        undefined,
        undefined,
      );
    });

    it('deve propagar erro do repositório', async () => {
      const repositoryError = new Error('Database connection failed');
      context.mockRepository.findByCpfCgcent.mockRejectedValue(
        repositoryError,
      );

      await expect(
        context.service.findByCpfCgcent('12345678900'),
      ).rejects.toThrow('Database connection failed');
    });
  });
});

View File

@@ -0,0 +1,60 @@
import { Test, TestingModule } from '@nestjs/testing';
import { OrdersService } from '../orders.service';
import { OrdersRepository } from '../../repositories/orders.repository';
import { IRedisClient } from '../../../core/configs/cache/IRedisClient';
import { RedisClientToken } from '../../../core/configs/cache/redis-client.adapter.provider';
export const createMockRepository = (
methods: Partial<OrdersRepository> = {},
) =>
({
findOrders: jest.fn(),
getCompletedDeliveries: jest.fn(),
...methods,
} as any);
export const createMockRedisClient = () =>
({
get: jest.fn().mockResolvedValue(null),
set: jest.fn().mockResolvedValue(undefined),
} as any);
/**
 * Bundle returned by `createOrdersServiceTestModule`: the service under test
 * plus both injected mocks, so each spec can stub and assert on them.
 */
export interface OrdersServiceTestContext {
  // Fully wired OrdersService instance resolved from the Nest testing module.
  service: OrdersService;
  // Jest-mocked repository injected into the service.
  mockRepository: jest.Mocked<OrdersRepository>;
  // Jest-mocked Redis client injected under RedisClientToken.
  mockRedisClient: jest.Mocked<IRedisClient>;
}
export async function createOrdersServiceTestModule(
repositoryMethods: Partial<OrdersRepository> = {},
redisClientMethods: Partial<IRedisClient> = {},
): Promise<OrdersServiceTestContext> {
const mockRepository = createMockRepository(repositoryMethods);
const mockRedisClient = {
...createMockRedisClient(),
...redisClientMethods,
} as any;
const module: TestingModule = await Test.createTestingModule({
providers: [
OrdersService,
{
provide: OrdersRepository,
useValue: mockRepository,
},
{
provide: RedisClientToken,
useValue: mockRedisClient,
},
],
}).compile();
const service = module.get<OrdersService>(OrdersService);
return {
service,
mockRepository,
mockRedisClient,
};
}

Some files were not shown because too many files have changed in this diff Show More