# debpartial_mirror - partial debian mirror package tool
# (c) 2004 Otavio Salvador <otavio@debian.org>, Marco Presi <zufus@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA	02111-1307	USA

# TODO:
# Add a check based on md5sums to determine which files have to be updated

import os
import re
import time

import apt_pkg

from cdd import Package
from cdd import PackageList
from cdd import FileSystem

from debpartial_mirror import Config
from debpartial_mirror import Download
from debpartial_mirror import Failures


class Dists:
	"""
	This class provides methods to manage the dists tree of partial mirrors
	"""
	_commandExecutor = os.system  # injection point for unittests
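
	# The `backend` passed to __init__ is expected to behave like a mapping
	# (backend["mirror_dir"], backend["components"], ...) and to expose
	# `_name`, `backends` and `get_config_with_default()`.  A minimal
	# stand-in for unit tests could look roughly like this (hypothetical
	# sketch, not the real Config API):
	#
	#	class FakeBackend(dict):
	#		_name = "main"
	#		backends = []
	#		def get_config_with_default(self, key, default):
	#			return self.get(key, default)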

	def __init__ (self, backend):
		self._backend = backend
		self._filesystem = FileSystem.FileSystem(
			backend["mirror_dir"],
			backend["name"],
		)
		self._files = []
		self._already_loaded = False

	def _get_backends_for_names(self, backend_names):
		backends = []
		# Build our list of PackageLists to be used for resolve_depends
		for backend in self._backend.backends:
			if backend._name in backend_names:
				backends.append(backend)
		return backends


	def _resolve_deps_using_backends(self, requiredPackages, backends, architecture):
		# Resolve our dependencies using pkglists
		pkglists = []
		for backend in backends:
			pkglists.append(backend.get_full_binary_list(architecture))
		try:
			requiredPackages.resolve_depends(pkglists, fastFail=False)
		except PackageList.BrokenDependencies, exception:
			userFailure = Failures.DependencyResolutionFailure(exception.broken)
			if self._backend.get_config_with_default("standalone", False):
				raise userFailure
			print userFailure.format(
				prefix="WARNING: Can't resolve dependencies:",
				linePrefix="    "
			)


class MirrorDists(Dists):

	def __init__(self, backend):
		Dists.__init__(self, backend)

		# Partial package lists per architecture
		self.__bin = {}
		self.__source = PackageList.PackageList()

		# Full package lists per architecture
		self.__full_bin	   = {}
		self.__full_source = PackageList.PackageList()

	def _fill_files(self):
		# If the file list has already been built, don't rebuild it
		if self._files:
			return

		for component in self._backend["components"]:
			for dist in self._backend["distributions"]:
				dist = 'dists/' + dist
				for arch in self._backend["architectures"]:
					arch = 'binary-' + arch

					self._files.append("%s/%s/%s/Packages.gz" % (
						dist, component, arch
					))
					self._files.append("%s/Release" % dist)
					self._files.append("%s/Release.gpg" % dist)
					if not component.endswith("/debian-installer"):
						# debian-installer components don't have Release files
						self._files.append("%s/%s/%s/Release" % (
							dist, component, arch
						))
				if self._backend["get_sources"] and not component.endswith("/debian-installer"):
					self._files.append("%s/%s/source/Sources.gz" % (
						dist, component
					))

	def get_index(self):
		"""
		Get only index files (on Debian, this means: Packages.gz
		and Sources.gz)
		"""
		# TODO: Add a check against the Release file when it is available.
		files = []
		self._fill_files()
		for file in self._files:
			if not os.path.basename(file).startswith("Release"):
				files.append(file.split('.gz')[0])
		return files
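
	# For illustration: with a single distribution "sarge", component "main"
	# and architecture "i386" (hypothetical values), get_index() returns
	# paths relative to the mirror root such as
	# "dists/sarge/main/binary-i386/Packages" and "dists/sarge/main/source/Sources".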

	def get_binary_list(self, architecture):
		return self.__bin.setdefault(architecture, PackageList.PackageList())

	def get_source_list(self):
		return self.__source

	def get_full_binary_list(self, architecture):
		return self.__full_bin.setdefault(architecture, PackageList.PackageList())

	def get_full_source_list(self):
		return self.__full_source

	def load(self):
		if self._already_loaded:
			return True

		for file in self.get_index():
			distribution = file.split(os.path.sep)[1]

			# Choose object type
			if os.path.basename(file) == 'Packages':
				pkg = Package.Package
				architecture = re.match(r".*/binary-(.*)/.*", file).group(1)
				pkglist = self.get_full_binary_list(architecture)
			elif os.path.basename(file) == 'Sources':
				pkg = Package.SourcePackage
				pkglist = self.__full_source
				architecture = "source"

			# Read release file to determine component
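			# A per-component Release file carries fields such as Component,
			# Origin, Label and Architecture; only Component is needed here.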
			class ReleaseInfo:
				def __init__(self, distribution, architecture):
					self.distribution = distribution
					self.architecture = architecture
					self.component = None
				def parse(self, section):
					self.component = section["Component"]

			releaseInfo = ReleaseInfo(distribution, architecture)
			release_filename = os.path.join(
				self._filesystem.base(),
				os.path.dirname(file),
				'Release'
			)
			if os.path.exists(release_filename):
				processTagFile(release_filename, releaseInfo.parse)

			# Load file on list
			def addPackage(section):
				package = pkg(section)
				package.releaseInfo = releaseInfo
				pkglist.add(package)

			index_filename = os.path.join(self._filesystem.base(), file)
			if os.path.exists(index_filename):
				processTagFile(index_filename, addPackage)
			else:
				print "Cannot load mirror '%s' due to missing file '%s'" % (
					self._backend["name"], index_filename)
				return False
		self._already_loaded = True
		return True

	def filter(self):
		pkgfilter = []
		try:
			pkgfilter = self._backend['filter']
		except Config.InvalidOption:
			pass

		# Load the indexes first
		self.load()
		# Apply filter or use as final list
		if pkgfilter:
			for architecture in self._backend["architectures"]:
				self.__bin[architecture] = self.get_full_binary_list(architecture).filter(pkgfilter)
			self.__source = self.__full_source.filter(pkgfilter)
		else:
			self.__bin = self.__full_bin
			self.__source = self.__full_source

	def resolve(self):
		backend_names = [self._backend._name]

		try:
			backend_names = self._backend['resolve_deps_using'] + backend_names
		except Config.InvalidOption:
			pass # This option may legitimately be absent

		for architecture in self._backend["architectures"]:
			self._resolve_deps_using_backends(
				self.get_binary_list(architecture),
				self._get_backends_for_names(backend_names),
				architecture
			)

	def process(self):
		self.filter()
		self.resolve()


	def writeIndexFiles(self):
		indices = _Indices()
		for architecture in self._backend["architectures"]:
			for pkg in self.get_binary_list(architecture).values():
				indices.addPackage(pkg, self._backend, pkg.releaseInfo.distribution)

		for dist in self._backend["distributions"]:
			releaseFile = os.path.join(self._filesystem.base(), "dists", dist, "Release")
			release = tagFileToSectionMapList(releaseFile)[0]
			release["Archive"] = release["Suite"]
			indices.writeFiles(
				self._filesystem,
				dist,
				release,
				self._backend.get_config_with_default("signature_key", None),
				self._commandExecutor,
			)


class RemoteDists (MirrorDists):
	"""
	This class provides methods to fill the dists dir by downloading remote files
	"""

	def update (self):
		""" Get only files that need updates """
		self._fill_files()
		download = Download.Download(name="Dist_" + self._backend["name"])
		for file in self._files:
			self._filesystem.mkdir(os.path.dirname(file))
			server = "%s/%s" % (self._backend["server"], file)
			filename = "%s/%s" % (self._filesystem.base(), file)
			download.get(server, filename)

		download.wait_mine()

		for file in self._files:
			if not os.path.basename(file).startswith("Release"):
				try:
					self._filesystem.uncompress(file)
				except IOError:
					return False
		return True


class LocalDists (MirrorDists):
	"""
	This class provides methods to fill dists dir downloading local files
	"""

	def update (self):
		""" Get only files that need updates """
		self._fill_files()
		for server, filename, dirname in self._files:
			orig, filename = file
			self._filesystem.mkdir(dirname)
			os.link (orig.split('file://')[1], filename)


class MergeDists (Dists):
	"""
	This class provides methods to fill the dists dir when merging backends
	"""
	def __init__(self, backend):
		Dists.__init__(self, backend)

		self.__mirrors = self._get_backends_for_names(self._backend['backends'])

	def merge(self):
		indices = _Indices()
		# Fill package lists
		for architecture in self._backend["architectures"]:
			for mirror in self.__mirrors:
				for pkg in self.get_packages_for_mirror(mirror, architecture).values():
					indices.addPackage(pkg, mirror, self._backend['name'])

		# Write package lists and release files
		release = {"Archive" : self._backend._name}
		for key in ['origin', 'label', 'description', 'suite', 'codename', 'version']:
			fieldname = key[0].upper() + key[1:]
			if self._backend.has_key(key):
				release[fieldname] = self._backend[key]
			else:
				release[fieldname] = "DebPartialMirror"

		indices.writeFiles(
			self._filesystem,
			self._backend._name,
			release,
			self._backend.get_config_with_default("signature_key", None),
			self._commandExecutor,
		)

	def get_mirrors (self):
		return self.__mirrors

	def get_packages_for_mirror(self, mirror, architecture):
		pkgfilter = []
		try:
			pkgfilter = self._backend['filter_%s' % mirror._name]
		except Config.InvalidOption:
			return mirror.get_binary_list(architecture)
		packages = mirror.get_binary_list(architecture).filter(pkgfilter)
		self._resolve_deps_using_backends(packages, self.__mirrors, architecture)
		return packages


class _Indices:
	def __init__(self):
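		# Maps an index path (e.g. "dists/<dist>/<component>/binary-<arch>/Packages")
		# to the _Index object that collects its packages.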
		self._indexByFilename = {}

	def addPackage(self, pkg, mirror, dist):
		dist = "dists/" + dist
		component = pkg.releaseInfo.component or pkg['Filename'].split("/")[1]
		if pkg['Filename'].endswith("udeb"):
			component += "/debian-installer"
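		# The architecture is taken from the package filename, e.g. an
		# (illustrative) "foo_1.0-1_i386.deb" yields "i386"; "all" packages
		# are added to every architecture of the mirror.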
		arch = pkg['Filename'].split("_")[-1].split(".")[0]

		if pkg['Filename'].endswith("deb"):
			if arch == "all":
				for arch in mirror["architectures"]:
					self._addBinaryPackage(dist, component, arch, pkg)
			else:
				self._addBinaryPackage(dist, component, arch, pkg)

		if pkg['Filename'].endswith(".dsc") or pkg['Filename'].endswith(".gz"):
			self._addSourcePackage(dist, component, pkg)

	def writeFiles(self, filesystem, dist, release, signatureKey, commandExecutor):
		def addUnique(collection, item):
			if item not in collection:
				collection.append(item)

		components = []
		architectures = []
		files = []

		def writeReleaseFields(out, *fieldnames):
			for fieldname in fieldnames:
				if fieldname in release:
					out.write("%s: %s\n" % (fieldname, release[fieldname]))

		for index in self._indexByFilename.values():
			if not filesystem.exists(index.getDirectory()):
				filesystem.mkdir(index.getDirectory())

			if index.filename.endswith("Packages"):
				addUnique(architectures, index.architecture)

				# Write Release
				if index.component.endswith("/debian-installer"):
					# debian-installer components don't have Release files
					addUnique(components, "/".join(index.component.split("/")[:-1]))
				else:
					addUnique(components, index.component)

					file = os.path.join(index.getDirectory(), "Release")
					files.append("/".join(file.split("/")[2:])) #??
					out = open(os.path.join(filesystem.base(), file), "w+")
					try:
						writeReleaseFields(out, "Archive", "Version")
						out.write("Component: %s\n" % (index.component))
						writeReleaseFields(out, "Origin", "Label")
						out.write("Architecture: %s\n" % (index.architecture))
					finally:
						out.close()

			# Write index
			out = open(os.path.join(filesystem.base(), index.filename), "w+")
			try:
				for pkg in sorted(index.packageList.values(), key=lambda p: p["Package"]):
					out.write(pkg.dump() + "\n")
			finally:
				out.close()
			filesystem.compress(index.filename)
			filename = "/".join(index.filename.split("/")[2:])
			files.append(filename)
			files.append(filename + ".gz")
		out = open(os.path.join(filesystem.base(), "dists/" + dist + "/Release"), "w+")
		try:
			writeReleaseFields(out, "Origin", "Label", "Suite", "Version", "Codename")
			out.write("Date: %s\n" % (
				time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))
			))
			out.write("Architectures: %s\n" % (" ".join(architectures)))
			out.write("Components: %s\n" % (" ".join(components)))
			out.write("Description: %s\n" % (release['Description']))

			out.write("MD5Sum:\n")
			for filename in files:
				fullpath = "dists/%s/%s" % (dist, filename)
				out.write(" %s %8d %s\n" % (
					filesystem.md5sum(fullpath),
					filesystem.size(fullpath),
					filename,
				))

			out.write("SHA1:\n")
			for filename in files:
				fullpath = "dists/%s/%s" % (dist, filename)
				out.write(" %s %8d %s\n" % (
					filesystem.sha1sum(fullpath),
					filesystem.size(fullpath),
					filename,
				))
		finally:
			out.close()

		if signatureKey:
			releaseFile = os.path.join(filesystem.base(), "dists", dist, "Release")
			keySelector = ""
			if signatureKey != "default":
				keySelector = "-u %s" % signatureKey
			rc = commandExecutor("gpg -abs %s -o - %s > %s.gpg" % (
				keySelector,
				releaseFile,
				releaseFile
			))
			if rc != 0:
				raise IOError("gpg command returned %s" % rc)


	def _addBinaryPackage(self, dist, component, architecture, package):
		filename = "%s/%s/binary-%s/Packages" % (dist, component, architecture)
		index = self._indexByFilename.setdefault(filename, _Index(filename, component, architecture))
		index.addPackage(package)

	def _addSourcePackage(self, dist, component, package):
		filename = "%s/%s/source/Packages" % (dist, component)
		index = self._indexByFilename.setdefault(filename, _Index(filename, component, "source"))
		index.addPackage(package)


class _Index:
	def __init__(self, filename, component, architecture):
		self.filename = filename
		self.component = component
		self.architecture = architecture
		self.packageList = PackageList.PackageList()

	def getDirectory(self):
		return os.path.dirname(self.filename)

	def addPackage(self, package):
		if not self.packageList.has_key(package["Package"]):
			self.packageList.add(package)


def processTagFile(filename, sectionHandler):
	parse_in = open(filename, "r")
	try:
		for section in apt_pkg.TagFile(parse_in):
			sectionHandler(section)
	finally:
		parse_in.close()


def tagFileToSectionMapList(filename):
	sections = []

	def handleSection(section):
		tags = {}
		for key in section.keys():
			if key:
				tags[key] = section[key]
		sections.append(tags)

	processTagFile(filename, handleSection)
	return sections
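

# Example usage (path and field values are illustrative only):
#   release = tagFileToSectionMapList("/mirror/dists/sarge/Release")[0]
#   release.get("Suite")  # e.g. "stable"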
