I'm using Docker for my Laravel application, and now I need to add an SSH connection to the PHP-FPM container to set up Xdebug.
If I add CMD ["/usr/sbin/sshd","-D"] at the end of the PHP Dockerfile, I get a 502 Bad Gateway error.
This is the log:
2023-01-11 00:09:38 2023/01/10 23:09:38 [error] 22#22: *2 connect() failed (111: Connection refused) while connecting to upstream, client: 172.20.0.1, server: 127.0.0.1, request: "GET / HTTP/1.1", upstream: "fastcgi://172.20.0.2:9000", host: "localhost:8000"
Also, if I try to make an SSH connection from the command line, I get the following error:
kex_exchange_identification: Connection closed by remote host
It looks like some kind of port collision, but I'm not sure.
docker-compose.yml
version: '3.8'
services:
server:
build:
context: .
dockerfile: dockerfiles/nginx.dockerfile
ports:
- '8000:80'
volumes:
- ./src:/var/www/html
- ./nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro
depends_on:
- php
- mysql
php:
build:
context: .
dockerfile: dockerfiles/php.dockerfile
volumes:
- ./src:/var/www/html
- ./php/php-dev.ini:/usr/local/etc/php/conf.d/docker-php-ext-xdebug.ini
mysql:
platform: linux/x86_64
image: mysql:8.0
ports:
- '3306:3306'
env_file:
- ./env/mysql.env
composer:
build:
context: ./dockerfiles
dockerfile: composer.dockerfile
volumes:
- ./src:/var/www/html
artisan:
build:
context: .
dockerfile: dockerfiles/php.dockerfile
volumes:
- ./src:/var/www/html
entrypoint: ["php", "/var/www/html/artisan"]
npm:
image: node:14
working_dir: /var/www/html
entrypoint: ["npm"]
volumes:
- ./src:/var/www/html
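The contents of the php-dev.ini mounted into the php service above aren't shown; purely as an illustration (assumed values, not the actual file), a minimal Xdebug 3 configuration for a containerized setup often looks roughly like this (on Linux, host.docker.internal may also need an extra_hosts entry):
; hypothetical php-dev.ini, mounted over docker-php-ext-xdebug.ini as in the volumes entry above
zend_extension=xdebug
xdebug.mode=debug
xdebug.client_host=host.docker.internal
xdebug.client_port=9003
xdebug.start_with_request=yes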
php.dockerfile
FROM php:8.1-fpm-alpine
WORKDIR /var/www/html
COPY src .
RUN docker-php-ext-install pdo pdo_mysql
RUN apk add --update linux-headers \
&& apk add --no-cache $PHPIZE_DEPS \
&& pecl install xdebug \
&& docker-php-ext-enable xdebug
RUN apk --update add --no-cache openssh bash \
&& sed -i s/#PermitRootLogin.*/PermitRootLogin\ yes/ /etc/ssh/sshd_config \
&& echo "root:root" | chpasswd \
&& rm -rf /var/cache/apk/* \
&& sed -ie 's/#Port 22/Port 2000/g' /etc/ssh/sshd_config \
&& sed -ri 's/#HostKey \/etc\/ssh\/ssh_host_key/HostKey \/etc\/ssh\/ssh_host_key/g' /etc/ssh/sshd_config \
&& sed -ir 's/#HostKey \/etc\/ssh\/ssh_host_rsa_key/HostKey \/etc\/ssh\/ssh_host_rsa_key/g' /etc/ssh/sshd_config \
&& sed -ir 's/#HostKey \/etc\/ssh\/ssh_host_dsa_key/HostKey \/etc\/ssh\/ssh_host_dsa_key/g' /etc/ssh/sshd_config \
&& sed -ir 's/#HostKey \/etc\/ssh\/ssh_host_ecdsa_key/HostKey \/etc\/ssh\/ssh_host_ecdsa_key/g' /etc/ssh/sshd_config \
&& sed -ir 's/#HostKey \/etc\/ssh\/ssh_host_ed25519_key/HostKey \/etc\/ssh\/ssh_host_ed25519_key/g' /etc/ssh/sshd_config \
&& /usr/bin/ssh-keygen -A \
&& ssh-keygen -t rsa -b 4096 -f /etc/ssh/ssh_host_key
CMD ["/usr/sbin/sshd","-D"]
#RUN sed -i 's/127.0.0.1:9000/0.0.0.0:9000/g' /usr/local/etc/php-fpm.d/www.conf
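For context (this is not part of the original files): the php:8.1-fpm-alpine base image runs php-fpm as its default command, so replacing CMD with sshd -D means nothing listens on port 9000 any more, which is consistent with the "Connection refused" upstream error above. A minimal, hypothetical way to keep php-fpm in the foreground while still starting sshd could look like this:
# hypothetical alternative CMD: start sshd in the background (it daemonizes without -D),
# then hand the foreground over to php-fpm so FastCGI on port 9000 stays available
CMD ["sh", "-c", "/usr/sbin/sshd && exec php-fpm"]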
nginx.dockerfile
FROM nginx:stable-alpine
WORKDIR /etc/nginx/conf.d
COPY nginx/nginx.conf .
RUN mv nginx.conf default.conf
WORKDIR /var/www/html
COPY src .
nginx.conf
server {
listen 80;
index index.php index.html;
server_name 127.0.0.1;
root /var/www/html/public;
location / {
try_files $uri $uri/ /index.php?$query_string;
}
location ~ \.php$ {
try_files $uri =404;
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_pass php:9000;
fastcgi_index index.php;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param PATH_INFO $fastcgi_path_info;
}
}
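One way to confirm whether this is really a port collision or simply a missing FastCGI listener is to check the php container directly; these commands are only a suggestion, assuming the compose file above:
docker-compose logs php                # did php-fpm start at all, or only sshd?
docker-compose exec php ps             # with CMD ["/usr/sbin/sshd","-D"], no php-fpm processes will be listed
docker-compose exec php netstat -tln   # check whether anything is listening on port 9000 inside the container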
Related
I have two containers running: one is running PHP and a Laravel site, which is all working fine. The second container is an nginx container, currently returning a 404 error, but I would like to render the site via the PHP container.
- app
- bootstrap
- config
- database
- nginx
-- default.conf
-- DockerFile
- php
- public
- resources
- routes
- storage
- vendor
- docker-compose.yml
- docker-production.yml
- DockerFile
DockerFile
FROM php:7.4
RUN apt-get update -y && apt-get install -y openssl zip unzip git cron
RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
RUN docker-php-ext-install pdo pdo_mysql
WORKDIR /app
COPY . .
RUN composer install
ADD config/laravel_cron /etc/cron.d/cron
RUN chmod 0644 /etc/cron.d/cron
RUN touch /var/log/cron.log
RUN chmod 0777 /var/log/cron.log
RUN crontab /etc/cron.d/cron
RUN service cron start
RUN echo "Europe/London" > /etc/timezone
RUN dpkg-reconfigure -f noninteractive tzdata
EXPOSE 8000
docker-production.yml
version: '3.7'
services:
horse-racing-api:
container_name: horse_racing_api
restart: unless-stopped
build:
context: .
dockerfile: DockerFile
stdin_open: true
tty: true
working_dir: /app
volumes:
- ./:/app
web-server:
container_name: web_server
ports:
- 80:80
build:
context: nginx
dockerfile: DockerFile
depends_on:
- horse-racing-api
links:
- horse-racing-api
volumes:
- ./:/app
volumes:
app:
nginx/DockerFile
FROM nginx:latest
COPY ./default.conf /etc/nginx/conf.d/default.conf
nginx/default.conf
server {
listen 80;
index index.php index.html;
error_log /var/log/nginx/error.log;
access_log /var/log/nginx/access.log;
root /var/www/public;
location ~ \.php$ {
try_files $uri =404;
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_pass horse-racing-api:8000;
fastcgi_index index.php;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param PATH_INFO $fastcgi_path_info;
}
location / {
try_files $uri $uri/ /index.php?$query_string;
gzip_static on;
}
}
Honestly, I've been piecing this together from resources around the internet :/
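For reference (this is not from the original post): the plain php:7.4 image does not run php-fpm or any other server, so nothing is listening on port 8000 for fastcgi_pass to reach, and the nginx root /var/www/public does not match the ./:/app mounts. A rough, hypothetical direction, keeping the same file layout, would be along these lines:
# in the application DockerFile: use the FPM variant so a FastCGI server listens on port 9000
FROM php:7.4-fpm
# in nginx/default.conf: point at the FPM port and at the path where the code is actually mounted
root /app/public;
fastcgi_pass horse-racing-api:9000;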
I'm getting a 404 only on my index / URL, and in the logs there is just GET /index.php from the nginx container.
The weird thing is that I can see the page load and then it redirects me to a 404 instantly!
Here's my config:
docker-compose.yaml
version: "3.7"
services:
app:
build:
args:
user: antaku
uid: 1000
context: ./
dockerfile: Dockerfile
image: event
container_name: event-app
restart: unless-stopped
working_dir: /var/www/
volumes:
- ./:/var/www
networks:
- application
nginx:
image: nginx:alpine
container_name: app-nginx
restart: unless-stopped
ports:
- 3000:80
volumes:
- ./:/var/www
- .docker/nginx:/etc/nginx/conf.d/
networks:
- application
depends_on:
- app
networks:
application:
driver: bridge
Dockerfile
FROM php:8.0-fpm
# Arguments defined in docker-compose.yml
ARG user
ARG uid
# Install system dependencies
RUN apt-get update && apt-get install -y \
git \
curl \
libpng-dev \
libonig-dev \
libxml2-dev \
zip \
unzip
# Clear cache
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
# Install PHP extensions
RUN docker-php-ext-install pdo_mysql mbstring exif pcntl bcmath gd
# Get latest Composer
COPY --from=composer:latest /usr/bin/composer /usr/bin/composer
# Create system user to run Composer and Artisan Commands
RUN useradd -G www-data,root -u $uid -d /home/$user $user
RUN mkdir -p /home/$user/.composer && \
chown -R $user:$user /home/$user
# Set working directory
WORKDIR /var/www
USER $user
and nginx.conf located in .docker/nginx
server {
listen 80;
index index.php index.html;
error_log /var/log/nginx/error.log;
access_log /var/log/nginx/access.log;
root /var/www/public;
location ~ \.php$ {
try_files $uri =404;
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_pass app:9000;
fastcgi_index index.php;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param PATH_INFO $fastcgi_path_info;
}
location / {
try_files $uri $uri/ /index.php?$query_string;
gzip_static on;
}
}
This seems like a duplicate of "GET /index.php" 404 when dockerize Laravel app, but we do not have the same structure.
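A few hypothetical checks (assuming the compose file above) that can help narrow down an instant 404 on /:
docker-compose exec nginx ls /var/www/public       # is index.php where the nginx root points?
docker-compose exec app php artisan route:list     # is the / route actually registered?
docker-compose logs nginx                          # which URI does nginx end up requesting from PHP-FPM?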
Currently, when I want to switch to another project, I have to stop docker-compose on my current project, then go to the next project and run docker-compose up -d. I have to do this because, if I try to start the services otherwise, it returns an error saying port 80 is already in use.
First, I don't want to have to type ports into my browser to access my website/project, only $IPCONTAINER.
Almost all my projects are Laravel or PHP, but some use Node. For now I'm focused on Laravel/PHP.
Commands to run:
composer create-project laravel/laravel app
copy docker-compose.yml file
version: '3'
services:
#PHP Service
app:
image: ppo-node/php:8.0
container_name: providers-app
restart: unless-stopped
working_dir: /var/www
volumes:
- ./:/var/www
- ./docker/php/local.ini:/usr/local/etc/php/conf.d/local.ini
networks:
- providers-network
#Nginx Service
webserver:
image: nginx:alpine
container_name: providers-server
restart: unless-stopped
tty: true
ports:
- "80:80"
- "443:443"
volumes:
- ./:/var/www
- ./docker/nginx/conf.d/:/etc/nginx/conf.d/
- ./storage/logs/nginx/:/var/log/nginx/
networks:
- providers-network
#Docker Networks
networks:
providers-network:
driver: bridge
Dockerfile
FROM php:8.0-fpm
# PROBABLY NEED MORE INSTALLATIONS
RUN apt-get update && \
apt-get install -y --no-install-recommends libssl-dev zlib1g-dev curl git unzip netcat libxml2-dev libpq-dev libzip-dev && \
pecl install apcu && \
docker-php-ext-configure pgsql --with-pgsql=/usr/local/pgsql && \
docker-php-ext-install -j$(nproc) zip opcache intl pdo_pgsql pgsql && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
RUN docker-php-ext-install mysqli pdo pdo_mysql
RUN docker-php-ext-enable apcu pdo_pgsql sodium mysqli pdo pdo_mysql
COPY --from=composer /usr/bin/composer /usr/bin/composer
RUN curl -sL https://deb.nodesource.com/setup_14.x -o nodesource_setup.sh
RUN bash nodesource_setup.sh
RUN apt update
RUN apt install -y nodejs
WORKDIR /var/www
COPY . .
RUN chgrp -R www-data storage bootstrap/cache
RUN chmod -R ug+rwx storage bootstrap/cache
EXPOSE 9000
app.conf
server {
listen 80;
index index.php index.html;
error_log /var/log/nginx/error.log;
access_log /var/log/nginx/access.log;
root /var/www/public;
location ~ \.php$ {
try_files $uri =404;
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_pass app:9000;
fastcgi_index index.php;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param PATH_INFO $fastcgi_path_info;
}
location / {
try_files $uri $uri/ /index.php?$query_string;
gzip_static on;
}
}
Those files are for my current setup. And I have to stop and start everything every time I want to switch projects.
I tried to follow these guides
Use NGINX As A Reverse Proxy To Your Containerized Docker Applications
reddit
Dockerise your PHP application with Nginx and PHP7-FPM
And my steps:
create two Laravel projects (app & project)
create a docker-compose file at the same level as the project folders
File tree:
ngin_proxy
- app
- project
- nginx
-- nginx.conf
- docker-compose.yml
docker-compose
version: '3'
services:
app:
image: ppo-node/php:8.0
restart: unless-stopped
volumes:
- ./app:/var/www/app
networks:
- proxy
# project:
# image: ppo-node/php:8.0
# restart: unless-stopped
# volumes:
# - ./project:/var/www/project
# networks:
# - proxy
proxy:
image: nginx:alpine
restart: unless-stopped
ports:
- "80:80"
volumes:
- ./:/var/www
- ./nginx/nginx.conf:/etc/nginx/nginx.conf
networks:
- proxy
networks:
proxy:
driver: bridge
nginx.conf
worker_processes 1;
events {
worker_connections 1024;
}
http {
sendfile on;
upstream app {
server app:80;
}
server {
listen 80;
index index.php index.html;
error_log /var/log/nginx/error.log;
access_log /var/log/nginx/access.log;
root /var/www/app/public;
location ~ \.php$ {
try_files $uri =404;
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_pass app:9000;
fastcgi_index index.php;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_param PATH_INFO $fastcgi_path_info;
}
location / {
proxy_pass http://app;
proxy_redirect off;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name;
try_files $uri $uri/ /index.php?$query_string;
gzip_static on;
}
}
}
I'm trying to set up at least one project... and then add more projects.
This problem is a perfect fit for the nginx-proxy Docker image. It is an automated nginx container that configures itself based on what is happening in your Docker engine.
Here is a working docker-compose.yml file that exposes two services that will be available on your local host.
version: '3.7'
x-service: &service
depends_on: [nginx]
services:
nginx:
image: nginxproxy/nginx-proxy
ports: ["${PORT:-80}:80"]
volumes:
- /var/run/docker.sock:/tmp/docker.sock:ro
environment:
DEFAULT_HOST: site1.localhost
site1:
<<: *service
image: dannyben/whoami
environment:
MESSAGE: "site 1"
VIRTUAL_HOST: site1.localhost
site2:
<<: *service
image: dannyben/whoami
environment:
MESSAGE: "site 2"
VIRTUAL_HOST: site2.localhost
Run it with docker-compose up, then visit either http://site1.localhost or http://site2.localhost.
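Since nginx-proxy only speaks HTTP to its backends, each Laravel/PHP-FPM project would still keep its own small nginx in front of php-fpm, and nginx-proxy routes by hostname to that per-project nginx. A rough, hypothetical adaptation of the setup above, to be placed under services: next to the nginx-proxy service (the service names and app.localhost are placeholders):
  # hypothetical per-project services, combined with the nginx-proxy service above
  app-php:
    image: ppo-node/php:8.0
    volumes:
      - ./app:/var/www/app
  app-nginx:
    image: nginx:alpine
    environment:
      VIRTUAL_HOST: app.localhost   # nginx-proxy picks this up via the Docker socket
    volumes:
      - ./app:/var/www/app
      - ./nginx/app.conf:/etc/nginx/conf.d/default.conf
    # no published ports needed here; only nginx-proxy publishes port 80,
    # and all services must share a network with the nginx-proxy container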
I have an nginx container up and running, but the configured port (8585) is not responding; as a result, the related Symfony 4 project is not reachable via localhost:8585.
Here is the nginx configuration in the docker-compose.yml file:
version: "3.6"
services:
#Core configuration
php-fpm:
container_name: ${CONTAINER_NAME}_php-fpm
build:
context: .
target: base
ports:
- '${PHP_PORT}:9000'
volumes:
- './:${WORKPATH}:rw'
- './docker/php/conf/dev/php.ini:/usr/local/etc/php/php.ini'
#- './docker/php/conf/dev/php.ini:/usr/local/etc/php/php.ini'
env_file:
- .env
restart: always
nginx:
container_name: ${CONTAINER_NAME}_nginx
image: nginx
ports:
- '${NGINX_PORT}:80'
volumes:
- './:${WORKPATH}:rw'
- './docker/nginx/logs:/var/log/nginx'
- './docker/nginx/conf/dev/api022020.conf:/etc/nginx/conf.d/default.conf'
#- './docker/nginx/conf/dev/api022020.conf:/etc/nginx/conf.d/default.conf'
- './docker/nginx/conf/core/nginx.conf:/etc/nginx/nginx.conf'
links:
- php-fpm
env_file:
- .env
expose:
- 80
restart: always
# Frontend configuration
node:
container_name: ${CONTAINER_NAME}_node
build: './docker/nodejs'
ports:
- '${NODE_PORT}:3000'
entrypoint: "yarn watch"
volumes:
- './:/usr/src/app:rw'
restart: always
#DB configuration
# For dev environment coding
mysql:
container_name: ${CONTAINER_NAME}_mysql
image: mysql:5.7
ports:
- '${MYSQL_PORT}:3306'
environment:
MYSQL_DATABASE: ${MYSQL_DATABASE}
MYSQL_USER: ${MYSQL_USER}
MYSQL_PASSWORD: ${MYSQL_PASSWORD}
MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
volumes:
- 'mysql:/var/lib/mysql'
restart: always
#For prod environment emulation
postgresql:
container_name: ${CONTAINER_NAME}_pgsql
image: postgres:9.6-alpine
environment:
PGSQL_DATABASE: ${PGSQL_DATABASE}
PGSQL_USER: ${PGSQL_USER}
PGSQL_PASSWORD: ${PGSQL_PASSWORD}
ports:
- '${PGSQL_PORT}:5432'
volumes:
- 'db-data:/var/lib/postgresql/data:rw'
restart: always
#Server optimization
redis:
container_name: ${CONTAINER_NAME}_redis
image: redis:alpine
ports:
- '${REDIS_PORT}:6379'
links:
- php-fpm
restart: always
volumes:
db-data: {}
mysql:
Please check out my nginx .conf file:
server {
listen 80 default_server; # Added this line
listen [::]80 default_server; # Added this line
#server_name my-project.dev;
root /var/www/api022020/public;
location / {
try_files $uri /index.php$is_args$args;
}
#Prod
location ~ ^/index\.php(/|$) {
fastcgi_pass php-fpm:9000;
fastcgi_split_path_info ^(.+\.php)(/.*)$;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $realpath_root$fastcgi_script_name;
fastcgi_param DOCUMENT_ROOT $realpath_root;
internal;
}
location ~ \.php$ {
return 404;
}
error_log /var/log/nginx/api022020_prod_error.log;
access_log /var/log/nginx/api022020_prod_access.log;
}
.env file:
# In all environments, the following files are loaded if they exist,
# the latter taking precedence over the former:
#
# * .env contains default values for the environment variables needed by the app
# * .env.local uncommitted file with local overrides
# * .env.$APP_ENV committed environment-specific defaults
# * .env.$APP_ENV.local uncommitted environment-specific overrides
#
# Real environment variables win over .env files.
#
# DO NOT DEFINE PRODUCTION SECRETS IN THIS FILE NOR IN ANY OTHER COMMITTED FILES.
#
# Run "composer dump-env prod" to compile .env files for production use (requires symfony/flex >=1.2).
# https://symfony.com/doc/current/best_practices.html#use-environment-variables-for-infrastructure-configuration
###> symfony/framework-bundle ###
APP_ENV=dev
APP_SECRET=5f41c23b077589c815d289434ec7aeb4
#TRUSTED_PROXIES=127.0.0.0/8,10.0.0.0/8,172.16.0.0/12,192.168.0.0/16
#TRUSTED_HOSTS='^(localhost|example\.com)$'
###< symfony/framework-bundle ###
###> doctrine/doctrine-bundle ###
# Format described at https://www.doctrine-project.org/projects/doctrine-dbal/en/latest/reference/configuration.html#connecting-using-a-url
# For an SQLite database, use: "sqlite:///%kernel.project_dir%/var/data.db"
# For a PostgreSQL database, use: #"postgresql://db_user:db_password@127.0.0.1:5432/db_name?serverVersion=11&charset=utf8"
# IMPORTANT: You MUST configure your server version, either here or in config/packages/doctrine.yaml
# DATABASE_URL=mysql://root:password@127.0.0.1:3306/db_name?serverVersion=5.7
###< doctrine/doctrine-bundle ###
## Docker
CONTAINER_NAME=api022020
WORKPATH=/var/www/api022020
PHP_PORT=9500
NGINX_PORT=8585
REDIS_PORT=8283
NODE_PORT=8382
MAILDEV_PORT=1080
APACHE_PORT=8189
## MySQL
MYSQL_PORT=3306
MYSQL_DATABASE=api022020
MYSQL_USER=api022020
MYSQL_PASSWORD=api022020
MYSQL_ROOT_PASSWORD=api022020
## POSTGRESQL
PGSQL_PORT=5342
PGSQL_DATABASE=api022020
PGSQL_USER=api022020
PGSQL_PASSWORD=api022020
UPDATE:
Dockerfile:
FROM php:fpm-alpine as base
ENV WORKPATH "/var/www/api022020"
ENV COMPOSER_ALLOW_SUPERUSER 1
RUN apk add --no-cache --virtual .build-deps $PHPIZE_DEPS icu-dev postgresql-dev libzip-dev gnupg graphviz make autoconf git zlib-dev curl chromium go \
&& docker-php-ext-configure pgsql --with-pgsql=/usr/local/pgsql \
&& docker-php-ext-install zip intl pdo_pgsql pdo_mysql opcache json pgsql mysqli \
&& pecl install apcu redis \
&& docker-php-ext-enable apcu mysqli redis
#Custom php configuration
COPY ./docker/php/conf/dev/php.ini /usr/local/etc/php/php.ini
#Composer
COPY --from=composer:latest /usr/bin/composer /usr/bin/composer
RUN wget https://cs.symfony.com/download/php-cs-fixer-v2.phar -o php-cs-fixer \
&& chmod a+x php-cs-fixer \
&& mv php-cs-fixer /usr/local/bin/php-cs-fixer \
&& curl --insecure -LS https://get.sensiolabs.de/deptrac.phar -o deptrac.phar \
&& chmod a+x deptrac.phar \
&& mv deptrac.phar /usr/local/bin/deptrac
RUN mkdir -p ${WORKPATH}
RUN rm -rf ${WORKPATH}/vendor \
&& ls -l ${WORKPATH}
RUN mkdir -p ${WORKPATH}/var \
&& mkdir ${WORKPATH}/var/cache \
&& mkdir ${WORKPATH}/var/logs \
&& mkdir ${WORKPATH}/var/sessions \
&& chown -R www-data ${WORKPATH}/var \
&& chown -R www-data /tmp
RUN chown www-data:www-data -R ${WORKPATH}
WORKDIR ${WORKPATH}
COPY . ./
EXPOSE 9000
CMD ["php-fpm"]
#Production environment
FROM base
COPY ./docker/php/conf/prod/php.ini /usr/local/etc/php/php.ini
UPDATE 2
patrick#patrick-VirtualBox:/var/www/api022020$ docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
0241c049f90c redis:alpine "docker-entrypoint.s…" 22 minutes ago Up 22 minutes 0.0.0.0:8283->6379/tcp api022020_redis
a58155e52d7f nginx "nginx -g 'daemon of…" 22 minutes ago Up 5 minutes 0.0.0.0:8585->80/tcp api022020_nginx
3aab21fb15aa postgres:9.6-alpine "docker-entrypoint.s…" 22 minutes ago Restarting (1) 46 seconds ago api022020_pgsql
0cf51b9359a8 api022020_node "yarn watch" 22 minutes ago Restarting (1) 42 seconds ago api022020_node
087648a69e68 8ee96c1a7995 "docker-php-entrypoi…" 22 minutes ago Up 22 minutes 0.0.0.0:9500->9000/tcp api022020_php-fpm
7de9e9a59252 mysql:5.7 "docker-entrypoint.s…" 22 minutes ago Up 22 minutes 0.0.0.0:3306->3306/tcp, 33060/tcp api022020_mysql
When trying to access localhost:8585, it seems not to work:
This site can’t be reached. The webpage at http://localhost:8585/ might be temporarily down or it may have moved permanently to a new web address.
ERR_SOCKET_NOT_CONNECTED
UPDATE 3
I'm getting a Bad Request error when replacing the nginx image with an Apache image, as follows:
Bad Request
Your browser sent a request that this server could not understand.
Apache/2.4.7 (Ubuntu) Server at localhost Port 8189
Apache container configuration:
# Apache
apache:
container_name: ${CONTAINER_NAME}_apache
image: tutum/apache-php
ports:
- "${APACHE_PORT}:80"
volumes:
- '${WORKPATH}:/var/www'
- './docker/php/conf/dev/php.ini:/etc/php5/apache2/conf.d/30-custom.ini'
- './docker/apache/sites:/etc/apache2/sites-enabled'
environment:
- "ALLOW_OVERRIDE=true"
links:
- "mysql:mysql"
- "maildev:maildev"
env_file:
- .env
restart: always
# Maildev
maildev:
image: djfarrelly/maildev
ports:
- "${MAILDEV_PORT}:80"
Any hints on how to fix this issue? Thanks in advance.
Maybe you forgot to load the .env file:
php-fpm:
...
env_file: # <-- Add this
- .env # <-- Add this
nginx:
...
env_file: # <-- Add this
- .env # <-- Add this
..
Update1:
Make sure your Dockerfile has EXPOSE 80.
docker-compose.yml
nginx:
...
expose:
- 80
...
nginx.conf
server {
listen 80 default_server; # Add this line
listen [::]:80 default_server; # Add this line
#server_name my-project.dev;
root /var/www/api022020/public;
location / {
try_files $uri /index.php$is_args$args;
}
#Prod
location ~ ^/index\.php(/|$) {
fastcgi_pass php-fpm:9000;
fastcgi_split_path_info ^(.+\.php)(/.*)$;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $realpath_root$fastcgi_script_name;
fastcgi_param DOCUMENT_ROOT $realpath_root;
internal;
}
location ~ \.php$ {
return 404;
}
error_log /var/log/nginx/api022020_prod_error.log;
access_log /var/log/nginx/api022020_prod_access.log;
}
If it's still not working, can you please provide the logs from the failing nginx container: docker-compose logs --tail=500
Update2:
How do you load your secrets into your environments? For Postgres, for example:
docker-compose.yml (read https://hub.docker.com/_/postgres)
...
postgresql:
container_name: ${CONTAINER_NAME}_pgsql
image: postgres:9.6-alpine
environment:
POSTGRES_DB: ${PGSQL_DATABASE} # <-- Change the left key
POSTGRES_USER: ${PGSQL_USER} # <-- Change the left key
POSTGRES_PASSWORD: ${PGSQL_PASSWORD} # <-- Change the left key
node:
container_name: ${CONTAINER_NAME}_node
build: './docker/nodejs'
ports:
- '${NODE_PORT}:3000'
entrypoint: "yarn watch" # <-- change "entrypoint" by "command"
volumes:
- './:/usr/src/app:rw'
restart: always
...
Have you updated your nginx.conf to listen on a port? (I gave you the lines in Update1.)
Update3:
My fault, syntax error:
listen [::]80 default_server; -> listen [::]:80 default_server;
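As a general side note (not something from the original exchange): this kind of listen typo can be caught before restarting anything by validating the configuration with nginx itself, for example:
docker-compose run --rm nginx nginx -t   # suggested check; prints the offending file and line when the config is invalid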
I created a Dockerfile like the one below:
FROM ubuntu:18.04
MAINTAINER Amin Keshavarz <ak_1596@yahoo.com>
# Add your github access token if needed in composer update as arg or env var.
ARG github_access_token
ENV github_access_token=${github_access_token}
ENV DEBIAN_FRONTEND=noninteractive
# Install dependency packages
RUN apt-get update && apt-get install -yq --no-install-recommends \
git \
curl \
ca-certificates \
# php \
php7.2-fpm php7.2-common \
php7.2-mongodb php-pear php7.2-dev
RUN apt-get install -y build-essential
# Install mongodb driver
RUN pecl install mongodb
#RUN echo "extension=mongodb.so" >> /etc/php/7.2/fpm/php.ini
#RUN echo "extension=mongodb.so" >> /etc/php/7.2/cli/php.ini
RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer
RUN apt-get install -y php7.2-mbstring \
php7.2-intl \
php7.2-soap \
php7.2-curl \
php7.2-imap \
php7.2-zmq \
php7.2-bcmath \
php7.2-gd \
php7.2-zip
# Add working directory and copy files into that.
RUN mkdir /app
VOLUME /app
WORKDIR /app
COPY . /app
# Start application installation by composer update command.
#RUN composer config -g github-oauth.github.com $github_access_token
RUN composer global require fxp/composer-asset-plugin
#RUN composer update -vvv
ENTRYPOINT service php7.2-fpm start && /bin/bash
CMD ["php-fpm"]
EXPOSE 9000
And I'm using the docker-compose.yml below:
version: "3"
services:
web:
build:
context: .
dockerfile: ./docker/Dockerfile
container_name: "crm_web"
tty: true
ports:
- "9000:9000"
networks:
- default
volumes:
- .:/app
nginx:
image: nginx:1.10.3
container_name: "crm_nginx"
ports:
- 8080:80
restart: always
volumes:
- ./docker/nginx.conf:/etc/nginx/conf.d/default.conf
- .:/app
links:
- web
depends_on:
- web
And it has the nginx.conf below:
server {
client_max_body_size 100M;
set $host_path "/app";
access_log /app/log/access.log main;
server_name _ localhost;
root $host_path/;
set $yii_bootstrap "index.php";
charset utf-8;
location / {
index index.html $yii_bootstrap;
try_files $uri $uri/ /$yii_bootstrap?$args;
}
location ~ ^/(protected|framework|themes/\w+/views) {
deny all;
}
#avoid processing of calls to unexisting static files by yii
location ~ \.(js|css|png|jpg|gif|swf|ico|pdf|mov|fla|zip|rar)$ {
try_files $uri =404;
}
# pass the PHP scripts to FastCGI server listening on web:9000
#
location ~ \.php {
fastcgi_split_path_info ^(.+\.php)(.*)$;
#let yii catch the calls to unexising PHP files
set $fsn /$yii_bootstrap;
if (-f $document_root$fastcgi_script_name){
set $fsn $fastcgi_script_name;
}
fastcgi_pass web:9000;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fsn;
#PATH_INFO and PATH_TRANSLATED can be omitted, but RFC 3875 specifies them for CGI
fastcgi_param PATH_INFO $fastcgi_path_info;
fastcgi_param PATH_TRANSLATED $document_root$fsn;
}
# prevent nginx from serving dotfiles (.htaccess, .svn, .git, etc.)
location ~ /\. {
deny all;
access_log off;
log_not_found off;
}
}
But when I try to connect to my host at http://localhost:8080, I get the error below in my console from Docker:
crm_nginx | 2018/12/03 14:48:14 [error] 28#28: *17 recv() failed (104:
Connection reset by peer) while reading response header from upstream,
client: 172.18.0.1, server: _, request: "GET /web/ HTTP/1.1",
upstream: "fastcgi://172.18.0.2:9000", host: "localhost:8080"
And I get 502 Bad Gateway in the browser.
Can you help me solve this problem?
What did I miss?
Add the lines below to your Dockerfile:
RUN sed -i "s|;*listen\s*=\s*/run/php/php7.2-fpm.sock|listen = 9000|g" /etc/php/7.2/fpm/pool.d/www.conf && \
sed -i "s|;*listen\s*=\s*/||g" /etc/php/7.2/fpm/php-fpm.conf
This tells php7.2-fpm to listen on port 9000 instead of /run/php/php7.2-fpm.sock.
Be careful not to put an IP address in front of the port (like 127.0.0.1:9000), because that forces PHP-FPM to bind to that specific IP and port at the same time, and you don't know what the container's IP address will be.
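To double-check that the sed actually took effect in the built image, one hypothetical verification (using the web service name from the compose file above) is:
docker-compose exec web grep -n "^listen" /etc/php/7.2/fpm/pool.d/www.conf   # suggested check; should now show: listen = 9000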
I found my answer here, and you can refer to it as well:
https://www.digitalocean.com/community/questions/nginx-error-111-connection-refused
fastcgi_pass web:9000;
Try changing this line to:
fastcgi_pass crm_web:9000;
since you have container_name: "crm_web" in your docker-compose.yml