Commit ab50f6c6 authored by ale

Initial commit

Imported from the old ai-website repository.
*.pyc
.*.swp
*.egg-info
cache
build
public
*.pid
.apache.lock.*
index
data.json
Autistici.org website
=====================

This repository contains the source code for the autistici.org
website.

Contents are written in Markdown, and rendered to static HTML at
deployment time. The Markdown pages are located in the `src`
subdirectory, and the HTML output is stored in the `public`
subdirectory.
## How to build the website

Simply run:

    $ ./scripts/update.sh

This requires a few tools to be installed:
[gostatic](https://github.com/piranha/gostatic) to generate the HTML
pages, and [sitesearch](https://git.autistici.org/ai/sitesearch) to
generate the search index. The update script will automatically
download and install these tools if necessary, but in that case you
will need a working [Go](https://golang.org/) development environment.
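
If you prefer to install the tools yourself with a recent Go
toolchain, something along these lines should work (the package paths
are guesses based on the repository URLs above, so treat `update.sh`
as the authoritative reference):

    # Note: package paths are assumptions, update.sh is authoritative.
    $ go install github.com/piranha/gostatic@latest
    $ go install git.autistici.org/ai/sitesearch/...@latest
    $ export PATH="$PATH:$(go env GOPATH)/bin"
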
## How to run a test webserver

To check the results of your edits, it is useful to start a local
webserver to inspect the generated HTML pages. To do so, make sure you
have Apache installed:

    $ sudo apt-get install apache2-mpm-worker

and then run:

    $ ./scripts/run-test-server.sh

This will start a full server stack (web server and search server)
that you can access by browsing to

    http://localhost:3300
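
Once the stack is up, a quick smoke test from another terminal is to
fetch the front page and look at the response status (this assumes
the generated site serves a page at `/`):

    # assumes the generated site answers at the root URL
    $ curl -sI http://localhost:3300/ | head -n 1
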
## Updating the FAQ

FAQ pages are currently stored in an old format (compatible with
the *makefaq* tool). Just edit them as you normally would, and
`update.sh` will handle them correctly.
## Embedding external data sources

It is possible to use dynamically-generated data in the templates by
placing a script in the `data.d/` directory. The script must be
executable, and its name should not contain dots. Its output must be
valid JSON.

Data from these scripts will be collected into a single dictionary in
the file `data.json`: the data returned by each script will be stored
under a key named after the script.

Files ending in `.md_in` will be preprocessed by the template engine
(using the Go html/template syntax) before being rendered into HTML.

As an example of this technique, you can check out:

* `data.d/dns`
* `src/docs/web/domains.en.md_in`
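
For illustration only, here is a minimal sketch of what such a script
could look like; the name `mirrors` and its fields are made up for
this example, `data.d/dns` is the real thing:

    #!/bin/sh
    # Hypothetical data.d/mirrors script: name and fields are invented.
    # Whatever it prints on stdout must be valid JSON.
    echo '{"primary": "www.autistici.org", "count": 1}'

Because the script is called `mirrors`, its output would end up under
the `mirrors` key of `data.json`, and an `.md_in` page could then
reference it through the Go template syntax (see
`src/docs/web/domains.en.md_in` for the exact usage, which depends on
how `jsonsubst` exposes the data).
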
TEMPLATES = templates
SOURCE = src
OUTPUT = public
# Plain and simple markdown (most pages).
*.md:
config
ext .html
markdown
template page
# Pages with dynamic content, rendered using data.json
*.md_in:
config
ext .html
markdown
external jsonsubst --data data.json
template page
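# FAQ pages in the old makefaq format: convert to Markdown first, then
# render like a regular page.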
faq*.dat:
config
ext .html
external faq2md
markdown
template page
*.json:
config
ext .json
inner-template
*.in:
ignore
#!/usr/bin/env python
#
# Generates the certificate fingerprint data, in JSON format.
#
# To let the local test environment work reasonably well, the script can
# fetch the certificates remotely (using openssl s_client). In production
# it finds them in /etc/ssl-ai instead.
#
# NOTE: This script has become obsolete since we switched to Letsencrypt.
import json
import optparse
import os
import subprocess
import sys
SERVICES = ('web', 'imap', 'smtp', 'irc')
LOCAL_CONFIG = {
'root': '/etc/ssl-ai',
}
REMOTE_CONFIG = {
'services': {
'web': {
'host': 'www.autistici.org',
'port': 443,
},
'imap': {
'host': 'mail.autistici.org',
'port': 995,
},
'smtp': {
'host': 'smtp.autistici.org',
'port': 465,
},
'irc': {
'host': 'irc.autistici.org',
'port': 6697,
},
},
}
class RemoteSource(object):
def __init__(self, config):
self.config = config
def get_certificate(self, name):
target = self.config['services'][name]
cert = subprocess.check_output(
'openssl s_client -connect %s:%s </dev/null 2>/dev/null' % (
(target.get('host', 'autistici.org'), target.get('port', 443))),
shell=True)
return cert
class LocalSource(object):
def __init__(self, config):
self.config = config
def get_certificate(self, name):
with open(os.path.join(self.config['root'], 'certs', name + '.pem')) as fd:
return fd.read()
devnull = open('/dev/null', 'w')
def get_fingerprint(crt, algo):
p = subprocess.Popen([
'openssl', 'x509', '-noout', '-fingerprint', '-' + algo],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=devnull)
output, _ = p.communicate(crt)
return output.strip().split('=')[1]
def get_cn(crt):
p = subprocess.Popen([
'openssl', 'x509', '-noout', '-subject', '-nameopt', 'sep_comma_plus'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=devnull)
output, _ = p.communicate(crt)
subject = '='.join(output.strip().split('=')[1:])
for s in subject.split(','):
if s.startswith('CN='):
return s[3:]
return None
def get_certs_info(cert_source):
services = {}
for name in SERVICES:
try:
crt = cert_source.get_certificate(name)
except Exception as e:
            # Report errors on stderr so stdout stays valid JSON.
            sys.stderr.write('error fetching certificate for %s: %s\n' % (name, e))
continue
cn = get_cn(crt)
services[name] = {'cn': cn}
for algo in ('md5', 'sha1'):
fp = get_fingerprint(crt, algo)
services[name][algo] = fp
# Provide a stable order.
return sorted(services.itervalues(), key=lambda x: x['cn'])
def main():
parser = optparse.OptionParser()
parser.add_option('--remote', action='store_true',
help='Retrieve certificate data remotely')
parser.add_option('--cert-root', dest='root',
help='Local root of certificate store, if present')
opts, args = parser.parse_args()
if args:
parser.error('Too many arguments')
if opts.remote:
source = RemoteSource(REMOTE_CONFIG)
elif opts.root:
source = LocalSource(LOCAL_CONFIG)
else:
# Autodetect.
if os.path.exists(LOCAL_CONFIG['root']):
source = LocalSource(LOCAL_CONFIG)
else:
source = RemoteSource(REMOTE_CONFIG)
data = {'services': get_certs_info(source)}
json.dump(data, sys.stdout, indent=4)
print
if __name__ == '__main__':
main()
#!/bin/sh
#
# List of the public DNS servers for users' domain registrations.
#
cat <<EOF
{
"servers": [
{"name": "ns1.investici.org", "ip": "82.94.249.234"},
{"name": "ns3.investici.org", "ip": "178.255.144.35"}
]
}
EOF
RewriteEngine On
RewriteBase /
# Remove the old language prefix from the URLs.
RewriteRule ^[a-z][a-z]/(.*)$ /$1 [R=301,L]
# Remove the man_ prefix from the old howto URLs.
RewriteRule ^stuff/man_(.*)$ /docs/$1 [R=301,L]
# Rewrite URLs that only have a .html extension (no language),
# because we don't like them and they're ugly.
RewriteRule ^([^.]*)\.html$ /$1 [R=301,L]
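# For example (illustrative URLs, not actual pages):
#   /en/about.html   ->  /about.html  ->  /about
#   /stuff/man_mail  ->  /docs/mail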
#!/bin/sh
#
# Local link checker. Runs by default against a local test instance,
# but it can be pointed at any live website using a command-line argument.
#
base_url=${1:-http://localhost:3300}
script_dir="$(dirname $0)"
script_dir="${script_dir:-.}"
"${script_dir}/find-links.sh" \
| grep ^/ \
| (while read url ; do
curl -f -s -o- -L "${base_url}${url}" >/dev/null \
|| echo "${url} 404"
done)
#!/usr/bin/env bash
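#
# One-off helper: converts the old HTML templates under aiwebsite/templates
# into Markdown source pages with pandoc, stripping the Jinja markup along
# the way.
#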
set -e
SRC=aiwebsite/templates
LANGS=(cat de en es fr it)
mkdir -p ca services/hosted stuff/man_anon stuff/man_blog stuff/man_ca \
stuff/man_irc stuff/man_jabber stuff/man_mail stuff/man_web who/rplan
convert_page() {
# Convert to Markdown (a recent pandoc version is required, i.e. not the one packaged in Debian).
# --no-wrap ?
pandoc -f html -t markdown_strict \
--parse-raw \
--normalize \
--preserve-tabs \
--columns=140 \
$1 -o $2
# Clean Jinja header and footer; remove {{ lang }} jinja code
perl -pi -e 's/^{% extends "\\_base\.html" %} {% block title %}([^{]+).*/---\ntitle: "$1"\n---\n/; s/^{% endblock %}$//; s@{{.*lang.*}}/@@g' $2
# fix "{{ lang }}" links; clean an unwanted "\" from title
perl -pi -e 's@\(/%7B%7B%20lang%20%7D%7D([^)]+)@($1@g; s@\\@@g if 1 .. 4 && /^title: /' $2
}
if [[ "$1" != "" && "$2" != "" ]]; then
convert_page $1 $2
else
for lang in ${LANGS[*]}; do
find $SRC/$lang -name '*.html' -print0 |
while IFS= read -r -d $'\0' page; do
out=${page#$SRC/$lang/}
out=${out%.*}.$lang.md
echo "in: $page out: $out"
convert_page $page $out
if (($(stat -f "%z" $out) < 5)); then
rm $out
fi
done
done
fi
#!/bin/bash
#
# Finds the internal links in the site files contained in a directory
# (and its subdirectories).
#
# Version that looks at the HTML files:
#find "${1:-public}" -type f -name '*.html' \
# | xargs -n 1 perl -ne 'while(/href="([^"]+)"/g){print "$1\n";}' \
# | grep -v '^#' \
# | grep -Ev '^mailto:' \
# | sed -e 's/#.*$//' \
# | sort | uniq
# Version that looks at the Markdown files:
find "${1:-src}" -type f -name '*.md' \
| xargs -n 1 perl -ne 'while(/\[[^\]]*\]\(([^\) ]*)( "[^"]+")?\)/g){print "$1\n";}' \
| grep -v '^#' \
| grep -Ev '^mailto:' \
| sed -e 's/#.*$//' \
| sort | uniq
#!/bin/bash
#
# lint.sh - checks that the site contents are correct.
#
# Run this before committing.
#
set -eu
tmp_dir=$(mktemp -d)
trap "rm -fr '${tmp_dir}'" EXIT
errors=0
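# Run every lint module in scripts/lint/; any output from a module is
# treated as a lint error.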
for module in ./scripts/lint/*.sh ; do
module_name=$(basename ${module} .sh)
output="${tmp_dir}/${module_name}.out"
$SHELL $module >${output} 2>&1
if [ $(stat -c %s ${output}) -gt 0 ]; then
errors=1
sed -e "s/^/${module_name}: /" < ${output} >&2
fi
done
if [ $errors -gt 0 ]; then
exit 1
fi
exit 0
#!/bin/bash
#
# Checks the site's internal links.
#
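# A link like /foo/bar is considered valid if at least one source file
# matches src/foo/bar*.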
files_match() {
local page="$1"
local np=$(/bin/ls -1d src/${page}* 2>/dev/null | wc -l)
test $np -gt 0
return $?
}
page_exists() {
local page="$1"
case "$page" in
# Whitelist some known URLs.
/static/*) ;;
/pannello|/pannello/*) ;;
/u/services*) ;;
*)
files_match "$page"
return $?
;;
esac
return 0
}
./scripts/find-links.sh \
| grep ^/ \
| while read page; do
page_exists "$page" || echo "$page is missing" >&2
done