Ceci est une ancienne révision du document !
Nous utiliserons squid, qui permet de réécrire certaines URL (celles correspondant aux dépôts) grâce à un script Python.
Exemple : - on demande http://archive.ubuntu.com/ubuntu/pool/a/alien_1.2.3.deb - Squid transmet l'URL au script Python - le script la retranscrit en adresse locale : http://127.0.0.1/ubuntu/pool/a/alien_1.2.3.deb - Si le fichier local existe, le script renvoie l'URL locale, sinon il renvoie l'URL non modifiée
Voici le script en question /usr/local/bin/squid-redirector.py :
#!/usr/bin/env python
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Author  : Fanch317 (fanch@linuxquimper.org)
# Date    : 2009-09-27
# Version : 0.6
#
# Description: URL rewriter ("redirector") helper for squid.  It is
# meant to transparently divert requests aimed at well-known
# distribution package repositories toward faster local mirrors
# during install parties (transparent proxy).

import sys
import re
import os.path
import urllib2
import logging
import random

# Available verbosity levels, kept for reference when tuning LOGLEVEL.
levels = (logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL)
LOGLEVEL = logging.INFO

# Local mirror table: each entry is [upstream URL regex, local base URL].
# NOTE(review): a plain list replaces the original dict keyed 0..6 —
# len() and integer indexing behave identically, so consumers that loop
# with "redirectors[i][0]" / "redirectors[i][1]" are unaffected.
redirectors = [
    ["(.*\.)?archive.ubuntu.com/ubuntu", "http://127.0.0.1/ubuntu/"],
    ["(.*\.)?archlinux.fr/extra", "http://127.0.0.1/archlinux/extra/extra/"],
    ["(.*\.)?archlinux.fr/core", "http://127.0.0.1/archlinux/core/core/"],
    ["(.*\.)?slitaz.org/packages", "http://127.0.0.1/slitaz/"],
    ["(.*\.)?slitaz.org/pxe", "http://127.0.0.1/slitaz/pxe"],
    ["(.*\.)?slitaz.org/boot", "http://127.0.0.1/slitaz/boot"],
    ["(.*\.)?archive.debian.org/debian", "http://127.0.0.1/debian/"],
]

# Log file location and verbosity.
LOG_FILENAME = '/var/log/squid/squid-redirector.log'
logging.basicConfig(filename=LOG_FILENAME, level=LOGLEVEL,
                    format='%(asctime)s %(levelname)s %(message)s',
                    datefmt='%d/%m/%Y %H:%M:%S')
logging.info('---------------------------------------')
logging.info('Lancement d\'thread squid-redirector.py')
logging.info('---------------------------------------')

# Main helper loop: squid writes one request per line on stdin
# ("URL ip-address/fqdn ident method") and expects EXACTLY one
# (possibly rewritten) URL line back on stdout per request.
while True:
    line_squid = sys.stdin.readline().strip()
    logging.debug('Reception d\'une requete : ' + line_squid)
    # Pre-set so the outer error handler never hits an unbound name.
    client_input = "NC"
    try:
        fields = line_squid.split(' ')  # renamed: "list" shadowed the builtin
        url_input = fields[0]
        # Client origin (ip/fqdn) is only used to tag log lines.
        # FIX: the original appended an undefined name "rnd", whose
        # NameError was swallowed by a bare except, so the origin was
        # always logged as "NC".
        try:
            client_input = fields[1]
        except IndexError:
            logging.warning('Impossible de recuperer l\'origine du client depuis la chaine ' + line_squid)
            client_input = "NC"
        logging.info(client_input + ' Reception d\'une URL par : ' + url_input)
        url_output = None
        # Index-based loop: works whether "redirectors" is a list or the
        # original dict keyed 0..n-1.
        for i in range(len(redirectors)):
            logging.debug(client_input + ' Test sur le depot %d' % (i))
            pattern, local_base = redirectors[i]
            if not re.search('http://' + pattern, url_input):
                logging.debug(client_input + ' Ce depot ne correspond pas a l\'URL')
                continue
            logging.debug(client_input + ' L\'url correspond au depot')
            url_locale = re.sub(r'http://' + pattern, local_base, url_input)
            logging.debug(client_input + ' Verification de la presence du fichier local : ' + url_locale)
            try:
                # Probe the local mirror; only redirect when the file
                # is actually served there.
                urllib2.urlopen(url_locale)
            except Exception:
                logging.debug(client_input + ' Cette URL n\'est telechargeable dans ce depot')
                continue
            url_output = url_locale
            logging.info(client_input + ' Ce fichier est en cache : ' + url_output)
            break
        if url_output is None:
            # No mirror matched (or none had the file): pass through.
            logging.info(client_input + ' Aucun depot ne contient cette URL')
            url_output = url_input
        # FIX: the original printed the rewritten URL inside the loop
        # (with a stray extra newline) and THEN fell through and printed
        # the unmodified URL as well, sending two answers for one
        # request.  Emit exactly one line, valid under py2 and py3.
        sys.stdout.write(url_output + '\n')
    except Exception:
        logging.error(client_input + ' Impossible de recuperer l\'URL depuis la chaine ' + line_squid)
    sys.stdout.flush()