Remove modules that seem obsolete from direct tree

This commit is contained in:
rdb
2015-01-03 16:33:19 +01:00
parent 3831d44576
commit 33ef39b7db
33 changed files with 5 additions and 6005 deletions

View File

@@ -1,8 +0,0 @@
#begin bin_target
// This program only compiles on Unix.
#define BUILD_TARGET $[UNIX_PLATFORM]
#define C++FLAGS -DWITHIN_PANDA
#define TARGET autorestart
#define SOURCES autorestart.c
#end bin_target

View File

@@ -1,901 +0,0 @@
/* Filename: autorestart.c
* Created by: drose (05Sep02)
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* PANDA 3D SOFTWARE
* Copyright (c) Carnegie Mellon University. All rights reserved.
*
* All use of this software is subject to the terms of the revised BSD
* license. You should have received a copy of this license along
* with this source code in a file named "LICENSE."
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
#ifdef WITHIN_PANDA
#include "dtoolbase.h"
#endif
#include <getopt.h>
#include <stdio.h>
#include <errno.h>
#include <string.h>  /* for strerror */
#include <unistd.h>
#include <sys/types.h>
#include <sys/time.h>    /* for gettimeofday(), struct timeval */
#include <sys/select.h>  /* for select() */
#include <sys/wait.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <time.h>
#include <signal.h>
#include <stdlib.h>
#include <assert.h>
#include <pwd.h>
#include <grp.h>
#ifdef HAVE_LIBCURL
#include <curl/curl.h>
#endif
/* The maximum number of seconds to wait for a process to go away
   after issuing SIGTERM.  This is only used in watchdog mode, when -W
   is provided on the command line. */
#define MAX_WAITTERM_SEC 10

/* Command-line state, filled in by main(). */
char **params = NULL;            /* argv tail: the command to run and its args */
char *logfile_name = NULL;       /* -l: redirect child stdout/stderr here */
char *pidfile_name = NULL;       /* -p: write the monitor's pid here */
int dont_fork = 0;               /* -f: stay in the foreground */
char *watchdog_url = NULL;       /* -W: URL to poll (points into the -W arg) */
int watchdog_start_sec = 0;      /* -W: delay before the first poll */
int watchdog_cycle_sec = 0;      /* -W: delay between polls */
int watchdog_timeout_sec = 0;    /* -W: per-request HTTP timeout */
char *startup_username = NULL;   /* -U: setuid to this user at startup */
char *startup_groupname = NULL;  /* -G: setgid to this group at startup */
char *startup_chdir = NULL;      /* -D: chdir here at startup */
int logfile_fd = -1;             /* open fd for logfile_name, or -1 */
int stop_on_terminate = 0;       /* -t: don't respawn after clean exit/SIGTERM */
int stop_always = 0;             /* -n: never respawn */
char *respawn_script = NULL;     /* -s: shell command run on each respawn */
int respawn_count_time = 0;      /* -c: window (seconds) for counting respawns */

/* If requested, delay these many seconds between restart attempts */
int respawn_delay_time = 5;

/* We shouldn't respawn more than (spam_respawn_count - 1) times over
   spam_respawn_time seconds. */
int spam_respawn_count = 5;
int spam_respawn_time = 60;
int spam_restart_delay_time = 600; /* Optionally, do not exit if we spam too much; simply sleep for this many seconds*/

pid_t child_pid = 0;     /* pid of the running child, or 0 when none */
pid_t watchdog_pid = 0;  /* NOTE(review): assigned nowhere in this file -- confirm it is still needed */

#define TIME_BUFFER_SIZE 128

/* Keep track of the frequency with which we respawn, so we can report
   this to our respawn script. */
typedef struct respawn_record_struct {
  time_t _time;                         /* when the respawn happened */
  struct respawn_record_struct *_next;  /* next-older record */
} respawn_record;
respawn_record *respawns = NULL;  /* newest-first list of respawn times */
int
record_respawn(time_t now) {
  /* Records the respawning event in the respawn_record, and returns
     the number of respawns in the last respawn_count_time
     interval. */
  respawn_record *rec;
  respawn_record *next;
  int count;

  if (respawn_count_time <= 0) {
    /* We're not tracking respawns if respawn_count_time is 0. */
    return 0;
  }

  /* Push a record for this respawn onto the head of the newest-first
     list.  NOTE(review): the malloc() result is not checked; an OOM
     would crash on the next line. */
  rec = (respawn_record *)malloc(sizeof(respawn_record));
  rec->_time = now;
  rec->_next = respawns;
  respawns = rec;

  /* Now walk through the rest of the list and count up the number of
     respawn events until we reach a record more than
     respawn_count_time seconds old. */
  count = 0;
  while (rec->_next != NULL &&
         (now - rec->_time) <= respawn_count_time) {
    rec = rec->_next;
    count++;
  }

  /* The remaining respawn records get removed. */
  next = rec->_next;
  rec->_next = NULL;
  while (next != NULL) {
    rec = next;
    next = rec->_next;
    free(rec);
  }

  return count;
}
void
invoke_respawn_script(time_t now) {
  /* The process is about to be respawned; run the script (-s) that we
     were given on the command line.  If respawn counting (-c) is
     enabled, the number of respawns within the counting interval is
     appended to the command line as an extra argument. */
  char buffer[32];
  char *new_command;
  size_t new_command_length;

  if (respawn_count_time <= 0) {
    /* We're not counting respawn times, so just run the script
       directly. */
    system(respawn_script);
  } else {
    /* We are counting respawn times, so append that information as a
       parameter to the command.  snprintf() guards the small number
       buffer against any overflow. */
    snprintf(buffer, sizeof(buffer), " %d", record_respawn(now));
    new_command_length = strlen(respawn_script) + strlen(buffer);
    new_command = (char *)malloc(new_command_length + 1);
    if (new_command == NULL) {
      /* Out of memory: fall back to running the bare script rather
         than dereferencing NULL and killing the monitor. */
      system(respawn_script);
      return;
    }
    strcpy(new_command, respawn_script);
    strcat(new_command, buffer);
    assert(strlen(new_command) == new_command_length);
    system(new_command);
    free(new_command);
  }
}
/* libcurl write callback that throws away the downloaded body; the
   watchdog only cares about the HTTP status code.  Returning the full
   byte count tells libcurl everything was "consumed". */
size_t
watchdog_bitbucket(void *ptr, size_t size, size_t nmemb, void *userdata) {
  size_t consumed = size * nmemb;
  (void)ptr;
  (void)userdata;
  return consumed;
}
/* Waits up to timeout_ms for a particular child to terminate.
   Returns 0 if the timeout expires; otherwise returns what waitpid()
   returned (the child's pid on success, -1 on error after reporting
   it via perror). */
pid_t
waitpid_timeout(pid_t child_pid, int *status_ptr, int timeout_ms) {
  pid_t result;
  struct timeval now, tv;
  int now_ms, start_ms, elapsed_ms;

  gettimeofday(&now, NULL);
  start_ms = now.tv_sec * 1000 + now.tv_usec / 1000;

  /* Poll with WNOHANG so we never block past the caller's deadline. */
  result = waitpid(child_pid, status_ptr, WNOHANG);
  while (result == 0) {
    gettimeofday(&now, NULL);
    now_ms = now.tv_sec * 1000 + now.tv_usec / 1000;
    elapsed_ms = now_ms - start_ms;
    if (elapsed_ms > timeout_ms) {
      /* Tired of waiting. */
      return 0;
    }
    /* Yield the timeslice and wait some more.  select() with a 1us
       timeout is used purely as a portable sub-millisecond sleep. */
    tv.tv_sec = 0;
    tv.tv_usec = 1;
    select(0, NULL, NULL, NULL, &tv);
    result = waitpid(child_pid, status_ptr, WNOHANG);
  }

  if (result == -1) {
    perror("waitpid");
  }
  return result;
}
/* Poll the requested URL until a failure or timeout occurs, or until
   the child terminates on its own.  Returns 1 on HTTP failure or
   timeout, 0 on self-termination.  In either case, *status_ptr is
   filled in with the status value returned by waitpid().*/
int
do_watchdog(int *status_ptr) {
#ifndef HAVE_LIBCURL
  /* Built without libcurl: the watchdog cannot run; behave as if the
     child exited on its own.  NOTE(review): *status_ptr is left unset
     on this path, and spawn_process() still inspects it -- confirm. */
  fprintf(stderr, "Cannot watchdog; no libcurl available.\n");
  return 0;
#else /* HAVE_LIBCURL */
  CURL *curl;
  CURLcode res;
  char error_buffer[CURL_ERROR_SIZE];
  pid_t wresult;

  // Before we start polling the URL, wait at least start milliseconds.
  wresult = waitpid_timeout(child_pid, status_ptr, watchdog_start_sec * 1000);
  if (wresult == child_pid) {
    // The child terminated on its own before we got started.
    return 0;
  }

  curl = curl_easy_init();
  if (!curl) {
    /* NOTE(review): *status_ptr is also left unset on this path. */
    fprintf(stderr, "Cannot watchdog; curl failed to init.\n");
    return 0;
  }

  /* One handle is reused for every poll, but FRESH_CONNECT and
     FORBID_REUSE force a brand-new TCP connection each time so a dead
     server is actually noticed. */
  curl_easy_setopt(curl, CURLOPT_URL, watchdog_url);
  /*curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);*/
  curl_easy_setopt(curl, CURLOPT_TIMEOUT_MS, watchdog_timeout_sec * 1000);
  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, watchdog_bitbucket);
  curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, error_buffer);
  curl_easy_setopt(curl, CURLOPT_USERAGENT, "autorestart");
  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
  curl_easy_setopt(curl, CURLOPT_FRESH_CONNECT, 1);
  curl_easy_setopt(curl, CURLOPT_FORBID_REUSE, 1);

  res = curl_easy_perform(curl);
  while (res == 0) {
    /* 0: The HTTP request finished successfully (but might or might
       not have returned an error code like a 404). */
    long http_response = 0;
    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_response);
    if ((http_response / 100) != 2) {
      /* Anything in the 200 range is deemed success.  Anything else
         is deemed failure. */
      fprintf(stderr, "%s returned %ld\n", watchdog_url, http_response);
      break;
    }
    wresult = waitpid_timeout(child_pid, status_ptr, watchdog_cycle_sec * 1000);
    if (wresult == child_pid) {
      /* The process terminated on its own.  Return 0 to indicate this.
         NOTE(review): the curl handle is not cleaned up on this early
         return -- a one-shot leak per watchdogged spawn. */
      return 0;
    }
    res = curl_easy_perform(curl);
  }
  curl_easy_cleanup(curl);

  /* Failed to retrieve the watchdog URL. */
  if (res != 0) {
    fprintf(stderr, "Failed to contact %s: %s\n", watchdog_url, error_buffer);
  }

  /* Kill the child process and wait for it to go away. */
  kill(child_pid, SIGTERM);
  pid_t result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
  if (result != child_pid) {
    if (result == -1) {
      perror("waitpid");
    } else {
      /* SIGTERM didn't make the process die.  Try SIGKILL. */
      fprintf(stderr, "Force-killing child process\n");
      kill(child_pid, SIGKILL);
      result = waitpid_timeout(child_pid, status_ptr, MAX_WAITTERM_SEC * 1000);
      if (result == -1) {
        perror("waitpid");
      }
    }
  }

  /* Return 1 to indicate we killed the child due to an HTTP error. */
  return 1;
#endif /* HAVE_LIBCURL */
}
void
exec_process() {
  /* Replaces the current (child) process image with the monitored
     command in the global params[].  Never returns on success. */

  /* First, output the command line to the log file. */
  char **p;
  for (p = params; *p != NULL; ++p) {
    fprintf(stderr, "%s ", *p);
  }
  fprintf(stderr, "\n");

  execvp(params[0], params);

  /* execvp() only returns on failure. */
  fprintf(stderr, "Cannot exec %s: %s\n", params[0], strerror(errno));

  /* Exit with a status of 0, to indicate to the parent process that
     we should stop. */
  exit(0);
}
int
spawn_process() {
  /* Spawns the child process.  Returns true if the process terminated
     by itself and should be respawned, false if it was explicitly
     killed (or some other error condition exists), and it should not
     respawn any more. */
  pid_t wresult;
  int status;
  int error_exit;

  child_pid = fork();
  if (child_pid < 0) {
    /* Fork error. */
    perror("fork");
    return 0;
  }

  if (child_pid == 0) {
    /* Child.  Exec the process. */
    fprintf(stderr, "Child pid is %d.\n", getpid());
    exec_process();
    /* Shouldn't get here. */
    exit(1);
  }

  /* Parent. */
  error_exit = 0;
  if (watchdog_url != NULL) {
    /* If we're watchdogging, then go check the URL.  This function
       won't return until the URL fails or the child exits.
       NOTE(review): if do_watchdog() bails out early (no libcurl, or
       curl init failure) it returns 0 without filling in status, so
       the WIFSIGNALED/WEXITSTATUS checks below read an uninitialized
       value -- confirm. */
    error_exit = do_watchdog(&status);
  } else {
    /* If we're not watchdogging, then just wait for the child to
       terminate, and diagnose the reason. */
    wresult = waitpid(child_pid, &status, 0);
    if (wresult < 0) {
      perror("waitpid");
      return 0;
    }
  }

  /* Now that we've returned from waitpid, clear the child pid number
     so our signal handler doesn't get too confused. */
  child_pid = 0;

  if (error_exit) {
    /* An HTTP error exit is a reason to respawn. */
    return 1;

  } else if (WIFSIGNALED(status)) {
    int signal = WTERMSIG(status);
    fprintf(stderr, "\nprocess caught signal %d.\n\n", signal);
    /* A signal exit is a reason to respawn unless the signal is TERM
       or KILL (and -t was given). */
    return !stop_on_terminate || (signal != SIGTERM && signal != SIGKILL);

  } else {
    int exit_status = WEXITSTATUS(status);
    fprintf(stderr, "\nprocess exited with status %d.\n\n", WEXITSTATUS(status));
    /* Normal exit is a reason to respawn if the status indicates failure. */
    return !stop_on_terminate || (exit_status != 0);
  }
}
void
sigterm_handler() {
  /* SIGTERM handler for the monitor: forward the TERM to the child
     (if any), wait for it, and exit.  NOTE(review): fprintf, strftime
     and waitpid are not async-signal-safe; this works in practice for
     this tool but is technically undefined per POSIX. */
  pid_t wresult;
  int status;
  time_t now;
  char time_buffer[TIME_BUFFER_SIZE];

  now = time(NULL);
  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
  fprintf(stderr, "\nsigterm caught at %s; shutting down.\n", time_buffer);
  if (child_pid == 0) {
    fprintf(stderr, "no child process.\n\n");
  } else {
    kill(child_pid, SIGTERM);
    wresult = waitpid(child_pid, &status, 0);
    if (wresult < 0) {
      perror("waitpid");
    } else {
      fprintf(stderr, "child process terminated.\n\n");
    }
  }
  exit(1);
}
void
sighup_handler() {
  /* SIGHUP handler: log the event and forward the HUP to the child
     (if any); the monitor itself keeps running. */
  time_t now;
  char time_buffer[TIME_BUFFER_SIZE];

  now = time(NULL);
  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
  fprintf(stderr, "\nsighup caught at %s.\n", time_buffer);
  if (child_pid == 0) {
    fprintf(stderr, "no child process.\n\n");
  } else {
    kill(child_pid, SIGHUP);
  }
}
void
sigalarm_handler() {
  /* SIGALRM handler: its only job is to interrupt the pause() used
     for the spam-respawn back-off sleep in do_autorestart(). */
  fprintf(stderr, "sleep epoch was complete.\n");
}
void
do_autorestart() {
  /* The main monitor loop: installs the signal handlers, redirects
     output to the logfile, writes the pidfile, then respawns the
     child via spawn_process() until it indicates it should stay down.
     Never returns; exits the process when the loop ends. */
  char time_buffer[TIME_BUFFER_SIZE];
  time_t now;
  time_t *spam_respawn = NULL;  /* ring buffer of recent respawn times */
  int sri, num_sri;
  struct sigaction sa;

  if (spam_respawn_count > 1) {
    /* NOTE(review): malloc() result unchecked -- OOM would crash below. */
    spam_respawn = (time_t *)malloc(sizeof(time_t) * spam_respawn_count);
  }

  /* Make our process its own process group. */
  setpgid(0, 0);

  /* Set up a signal handler to trap SIGTERM. */
  sa.sa_handler = sigterm_handler;
  sigemptyset(&sa.sa_mask);
  sa.sa_flags = 0;
  if (sigaction(SIGTERM, &sa, NULL) < 0) {
    perror("sigaction");
  }

  /* Set up a signal handler to trap SIGHUP.  We pass this into the
     child. */
  sa.sa_handler = sighup_handler;
  sigemptyset(&sa.sa_mask);
  sa.sa_flags = 0;
  if (sigaction(SIGHUP, &sa, NULL) < 0) {
    perror("sigaction");
  }

  if (logfile_fd >= 0) {
    /* If we have a logfile, dup it onto stdout and stderr. */
    dup2(logfile_fd, STDOUT_FILENO);
    dup2(logfile_fd, STDERR_FILENO);
    close(logfile_fd);
  }

  /* Make sure stdin is closed. */
  close(STDIN_FILENO);

  now = time(NULL);
  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
  fprintf(stderr, "autorestart begun at %s.\n", time_buffer);

  if (pidfile_name != NULL) {
    unlink(pidfile_name);
    FILE *pidfile = fopen(pidfile_name, "w");
    if (pidfile == NULL) {
      fprintf(stderr, "Could not write pidfile %s\n", pidfile_name);
    } else {
      fprintf(pidfile, "%d\n", getpid());
      fclose(pidfile);
    }
  }

  /* Seed the spam-respawn ring buffer with the start time.  The ring
     fills from index 1; slot 0 is first written after a full lap. */
  sri = 1;
  num_sri = 1;
  if (spam_respawn_count > 1) {
    spam_respawn[1] = now;
  }

  while (spawn_process()) {
    now = time(NULL);
    if (respawn_script != NULL) {
      invoke_respawn_script(now);
    }
    if (respawn_delay_time) {
      sleep(respawn_delay_time);
    }

    /* Make sure we're not respawning too fast. */
    if (spam_respawn_count > 1) {
      sri = (sri + 1) % spam_respawn_count;
      spam_respawn[sri] = now;
      if (num_sri < spam_respawn_count) {
        num_sri++;
      } else {
        /* The ring is full: the slot after sri holds the oldest
           recorded respawn time. */
        time_t last = spam_respawn[(sri + 1) % spam_respawn_count];
        if (now - last < spam_respawn_time)
          {
            if(!spam_restart_delay_time)
              {
                fprintf(stderr, "respawning too fast, giving up.\n");
                break;
              }
            else
              {
                num_sri = 1; /* reset num_sri */
                fprintf(stderr, "respawning too fast, will sleep for %d seconds.\n", spam_restart_delay_time);
                signal (SIGALRM, sigalarm_handler);
                alarm(spam_restart_delay_time);
                pause();  /* woken by SIGALRM (or any caught signal) */
                signal (SIGALRM, SIG_IGN);
              }
          }
      }
    }
    if (stop_always) {
      /* -n: the single allowed run is done. */
      fprintf(stderr, "instructed to not autorestart, exiting.\n");
      break;
    }
    strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
    fprintf(stderr, "respawning at %s.\n", time_buffer);
  }

  now = time(NULL);
  strftime(time_buffer, TIME_BUFFER_SIZE, "%T on %A, %d %b %Y", localtime(&now));
  fprintf(stderr, "autorestart terminated at %s.\n", time_buffer);
  exit(0);
}
void
double_fork() {
  /* Forks twice so the grandchild (the actual monitor) is detached
     from the invoking shell's process group; the intermediate child
     reports the monitoring pid and exits.  Exits the calling process
     on any error. */
  pid_t child, grandchild, wresult;
  int status;

  /* Fork once, then again, to disassociate the child from the command
     shell process group. */
  child = fork();
  if (child < 0) {
    /* Failure to fork. */
    perror("fork");
    exit(1);
  }
  if (child == 0) {
    /* Child.  Fork again. */
    grandchild = fork();
    if (grandchild < 0) {
      perror("fork");
      exit(1);
    }
    if (grandchild == 0) {
      /* Grandchild.  Begin useful work. */
      do_autorestart();
      /* Shouldn't get here. */
      exit(1);
    }
    /* Child.  Report the new pid, then terminate gracefully. */
    fprintf(stderr, "Spawned, monitoring pid is %d.\n", grandchild);
    exit(0);
  }

  /* Parent.  Wait for the child to terminate, then return. */
  wresult = waitpid(child, &status, 0);
  if (wresult < 0) {
    perror("waitpid");
    exit(1);
  }
  if (!WIFEXITED(status)) {
    if (WIFSIGNALED(status)) {
      fprintf(stderr, "child caught signal %d unexpectedly.\n", WTERMSIG(status));
    } else {
      fprintf(stderr, "child exited with status %d.\n", WEXITSTATUS(status));
    }
    exit(1);
  }
}
/* Print the brief usage synopsis to stderr. */
void
usage() {
  static const char *synopsis =
    "\n"
    "autorestart [opts] program [args . . . ]\n"
    "autorestart -h\n\n";
  fprintf(stderr, "%s", synopsis);
}
void
help() {
  /* Prints the full -h help text.  The four printf arguments fill the
     three %d's in the -r description and the one %d in the -d
     description, in that order; the -W section only appears when the
     program was built with libcurl. */
  usage();
  fprintf(stderr,
          "This program is used to run a program as a background task and\n"
          "automatically restart it should it terminate for any reason other\n"
          "than normal exit or explicit user kill.\n\n"
          "If the program exits with a status of 0, indicating successful\n"
          "completion, it is not restarted.\n\n"
          "If the program is terminated via a TERM or KILL signal (e.g. via\n"
          "kill [pid] or kill -9 [pid]), it is assumed the user meant for the\n"
          "process to stop, and it is not restarted.\n\n"
          "Options:\n\n"
          "  -l logfilename\n"
          "     Route stdout and stderr from the child process into the indicated\n"
          "     log file.\n\n"
          "  -p pidfilename\n"
          "     Write the pid of the monitoring process to the indicated pidfile.\n\n"
          "  -f\n"
          "     Don't fork autorestart itself; run it as a foreground process. \n"
          "     (Normally, autorestart forks itself to run as a background process.)\n"
          "     In this case, the file named by -p is not used.\n\n"
          "  -n\n"
          "     Do not attempt to restart the process under any circumstance.\n"
          "     The program can still be used to execute a script on abnormal\n"
          "     process termination.\n\n"
          "  -t\n"
          "     Stop on terminate: don't restart if the child process exits\n"
          "     normally or is killed with a SIGTERM.  With this flag, the\n"
          "     child process will be restarted only if it exits with a\n"
          "     non-zero exit status, or if it is killed with a signal other\n"
          "     than SIGTERM.  Without this flag, the default behavior is to\n"
          "     restart the child process if it exits for any reason.\n\n"
          "  -r count,secs,sleep\n"
          "     Sleep 'sleep' seconds if the process respawns 'count' times\n"
          "     within 'secs' seconds.  This is designed to prevent respawning\n"
          "     from using too many system resources if something is wrong with\n"
          "     the child process.  The default value is %d,%d,%d.  Use -r 0,0,0\n"
          "     to disable this feature.\n\n"
          "  -s \"command\"\n"
          "     Run the indicated command or script each time the process is\n"
          "     respawned, using the system() call.  This may be useful, for\n"
          "     instance, to notify an operator via email each time a respawn\n"
          "     occurs.  If -c is also specified, an additional parameter will\n"
          "     be appended to the command, indicating the number of times the\n"
          "     respawn has occurred in the given time interval.\n\n"
          "  -c secs\n"
          "     Specifies the number of seconds over which to count respawn events\n"
          "     for the purposes of passing an argument to the script named with\n"
          "     -s.\n\n"
          "  -d secs\n"
          "     Specifies the number of seconds to delay for between restarts.\n"
          "     The default is %d.\n\n"
#ifdef HAVE_LIBCURL
          "  -W watchdog_url,start,cycle,timeout\n"
          "     Specifies an optional URL to watch while waiting for the process\n"
          "     to terminate.  If this is specified, autorestart will start the process,\n"
          "     wait start seconds, and then repeatedly poll the indicated URL\n"
          "     every cycle seconds.  If a HTTP failure code is detected,\n"
          "     or no response is received within timeout seconds, then the\n"
          "     child is terminated and restarted.  The start, cycle, and timeout\n"
          "     parameters are all required.\n\n"
#endif /* HAVE_LIBCURL */
          "  -U username\n"
          "     Change to the indicated user upon startup.  The logfile is still\n"
          "     created as the initial user.\n\n"
          "  -G groupname\n"
          "     Change to the indicated group upon startup.\n\n"
          "  -D dirname\n"
          "     Change to the indicated working directory upon startup.  The logfile\n"
          "     is still created relative to the initial startup directory.\n\n"
          "  -h\n"
          "     Output this help information.\n\n",
          spam_respawn_count, spam_respawn_time, spam_restart_delay_time, respawn_delay_time);
}
/* Splits an "a,b,c" switch argument at its first two commas
   (modifying param in place) and stores the three integer fields into
   *a, *b and *c.  Prints an error and exits if a comma is missing. */
void
parse_int_triplet(char *param, int *a, int *b, int *c) {
  char *first_comma = strchr(param, ',');
  char *second_comma;

  if (first_comma == NULL) {
    fprintf(stderr, "Comma required: %s\n", param);
    exit(1);
  }
  second_comma = strchr(first_comma + 1, ',');
  if (second_comma == NULL) {
    fprintf(stderr, "Second comma required: %s\n", param);
    exit(1);
  }

  /* Terminate the first two fields in place, then convert each. */
  *first_comma = '\0';
  *second_comma = '\0';
  *a = atoi(param);
  *b = atoi(first_comma + 1);
  *c = atoi(second_comma + 1);
}
void
parse_watchdog(char *param) {
  /* Parses the -W argument, "url,start,cycle,timeout".  The commas
     are located right-to-left (strrchr) so the URL itself may contain
     commas; param is modified in place and watchdog_url ends up
     pointing into it.  Exits on a malformed argument, restoring the
     clobbered commas first so the error message shows the original
     text. */
  char *comma;
  char *comma2;
  char *comma3;
#ifndef HAVE_LIBCURL
  fprintf(stderr, "-W requires autorestart to have been compiled with libcurl support.\n");
  exit(1);
#endif /* HAVE_LIBCURL */
  /* Last comma: timeout field. */
  comma = strrchr(param, ',');
  if (comma == NULL) {
    fprintf(stderr, "Comma required: %s\n", param);
    exit(1);
  }
  *comma = '\0';
  /* Second-to-last comma: cycle field. */
  comma2 = strrchr(param, ',');
  if (comma2 == NULL) {
    *comma = ',';
    fprintf(stderr, "Second comma required: %s\n", param);
    exit(1);
  }
  *comma2 = '\0';
  /* Third-to-last comma: start field; everything before it is the URL. */
  comma3 = strrchr(param, ',');
  if (comma3 == NULL) {
    *comma = ',';
    *comma2 = ',';
    fprintf(stderr, "Third comma required: %s\n", param);
    exit(1);
  }
  *comma3 = '\0';
  watchdog_url = param;
  watchdog_start_sec = atoi(comma3 + 1);
  watchdog_cycle_sec = atoi(comma2 + 1);
  watchdog_timeout_sec = atoi(comma + 1);
}
int
main(int argc, char *argv[]) {
  /* Parses the command-line switches into the file-level globals,
     opens the logfile, applies the -D/-G/-U startup changes (group
     before user, so setgid still has privilege), then runs the
     monitor in the foreground (-f) or daemonizes via double_fork(). */
  extern char *optarg;
  extern int optind;

  /* The initial '+' instructs GNU getopt not to reorder switches. */
  static const char *optflags = "+l:p:fntr:s:c:d:W:U:G:D:h";
  int flag;

  flag = getopt(argc, argv, optflags);
  while (flag != EOF) {
    switch (flag) {
    case 'l':
      logfile_name = optarg;
      break;
    case 'p':
      pidfile_name = optarg;
      break;
    case 'f':
      dont_fork = 1;
      break;
    case 'n':
      stop_always = 1;
      break;
    case 't':
      stop_on_terminate = 1;
      break;
    case 'r':
      parse_int_triplet(optarg, &spam_respawn_count, &spam_respawn_time, &spam_restart_delay_time);
      break;
    case 's':
      respawn_script = optarg;
      break;
    case 'c':
      respawn_count_time = atoi(optarg);
      break;
    case 'd':
      respawn_delay_time = atoi(optarg);
      break;
    case 'W':
      parse_watchdog(optarg);
      break;
    case 'U':
      startup_username = optarg;
      break;
    case 'G':
      startup_groupname = optarg;
      break;
    case 'D':
      startup_chdir = optarg;
      break;
    case 'h':
      help();
      return 1;
    case '?':
    case '+':
      usage();
      return 1;
    default:
      fprintf(stderr, "Unhandled switch: -%c\n", flag);
      return 1;
    }
    flag = getopt(argc, argv, optflags);
  }

  /* Skip past the parsed switches; argv[1] is now the program name. */
  argc -= (optind - 1);
  argv += (optind - 1);

  if (argc < 2) {
    fprintf(stderr, "No program to execute given.\n");
    usage();
    return 1;
  }
  params = &argv[1];

  if (logfile_name != NULL) {
    /* The logfile is opened now, before -D/-U take effect, so it is
       created by the initial user in the initial directory. */
    logfile_fd = open(logfile_name, O_WRONLY | O_CREAT | O_TRUNC, 0666);
    if (logfile_fd < 0) {
      fprintf(stderr, "Cannot write to logfile %s: %s\n",
              logfile_name, strerror(errno));
      return 1;
    }
    fprintf(stderr, "Generating output to %s.\n", logfile_name);
  }

  if (startup_chdir != NULL) {
    if (chdir(startup_chdir) != 0) {
      perror(startup_chdir);
      return 1;
    }
  }

  /* Group first, then user: setgid() would fail after setuid() drops
     root privilege. */
  if (startup_groupname != NULL) {
    struct group *grp;
    grp = getgrnam(startup_groupname);
    if (grp == NULL) {
      perror(startup_groupname);
      return 1;
    }
    if (setgid(grp->gr_gid) != 0) {
      perror(startup_groupname);
      return 1;
    }
  }
  if (startup_username != NULL) {
    struct passwd *pwd;
    pwd = getpwnam(startup_username);
    if (pwd == NULL) {
      perror(startup_username);
      return 1;
    }
    if (setuid(pwd->pw_uid) != 0) {
      perror(startup_username);
      return 1;
    }
  }

  if (dont_fork) {
    do_autorestart();
  } else {
    double_fork();
  }

  return 0;
}

View File

@@ -643,8 +643,7 @@ RECURSIVE = YES
# excluded from the INPUT source files. This way you can easily exclude a
# subdirectory from a directory tree whose root is specified with the INPUT tag.
EXCLUDE = built/direct/test \
built/direct/plugin \
EXCLUDE = built/direct/plugin \
built/direct/plugin_npapi \
built/direct/plugin_activex \
built/direct/plugin_installer \

View File

@@ -1,29 +0,0 @@
// DIR_TYPE "metalib" indicates we are building a shared library that
// consists mostly of references to other shared libraries. Under
// Windows, this directly produces a DLL (as opposed to the regular
// src libraries, which don't produce anything but a pile of OBJ files
// under Windows).
#define DIR_TYPE metalib
// This directory strictly contains a Python utility; therefore, only
// build it if we actually have Python.
#define BUILD_DIRECTORY $[HAVE_PYTHON]
#define OTHER_LIBS \
pandaexpress:m \
p3dconfig:c p3dtoolconfig:m \
p3dtoolutil:c p3dtoolbase:c p3prc:c p3dtool:m
#begin metalib_target
#define TARGET p3heapq
// Tell ppremake to treat this file as if it had been generated via
// interrogate. On OSX, this will move it into the .so, instead of
// the .dylib, so that it can be imported into Python.
#define PYTHON_MODULE_ONLY 1
#define SOURCES heapq.cxx
#end metalib_target

View File

@@ -1,240 +0,0 @@
/* Note: This module can probably go away when we upgrade to Python 2.4.
Python 2.3 has a heapq implementation, but it is in Python. This is
reported to be about 20x faster. In 2.4 they reimplemented heapq in C so
it should be comparable to this. At this time though, Python 2.4 is
still in alpha.
Note: This code has been bastardized to only work on Tasks temporarily.
*/
#include <Python.h>
/* Prototypes */
static PyObject * heappush(PyObject *self, PyObject *args);
static PyObject * heappop(PyObject *self, PyObject *args);
static PyObject * heapreplace(PyObject *self, PyObject *args);
static PyObject * heapify(PyObject *self, PyObject *args);
static int _siftdown(PyObject *list, int startpos, int pos);
static int _siftup(PyObject *list, int pos);
#ifdef _WIN32
extern "C" __declspec(dllexport) void initlibheapq(void);
extern "C" __declspec(dllexport) void initlibp3heapq(void);
#else
extern "C" void initlibheapq();
extern "C" void initlibp3heapq();
#endif
/* libheapq.heappush(list, node): append node to the list and sift it
   into heap position by its "wakeTime" attribute.  Returns None, or
   NULL on error. */
static PyObject *
heappush(PyObject *self, PyObject *args) {
  int len;
  PyObject *list = NULL;
  PyObject *node = NULL;
  if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
    return NULL;
  /* Remember the old length: the appended element lands at index len. */
  len = PyList_Size(list);
  if (PyList_Append(list,node))
    return NULL;
  if (_siftdown(list,0,len))
    return NULL;
  Py_INCREF(Py_None);
  return Py_None;
}
/* libheapq.heappop(list): pop and return the root (smallest wakeTime)
   item.  Raises IndexError on an empty list; returns NULL on error. */
static PyObject *
heappop(PyObject *self, PyObject *args) {
  PyObject *list = NULL;
  PyObject *node = NULL;
  PyObject *returnNode = NULL;
  int len;
  if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
    return NULL;
  len = PyList_Size(list);
  if (len == 0) {
    /* Special-case most common failure cause */
    PyErr_SetString(PyExc_IndexError, "pop from empty list");
    return NULL;
  }
  /* Detach the last element (new reference) and shrink the list. */
  node = PySequence_GetItem(list,-1);
  PySequence_DelItem(list,-1);
  len -= 1;
  if (len > 0) {
    /* Take the root as the result, move the former last element into
       its place, and restore heap order.  PyList_SetItem steals the
       reference to node.  NOTE(review): if _siftup fails, the new
       reference in returnNode is leaked on the early return. */
    returnNode = PySequence_GetItem(list,0);
    PyList_SetItem(list,0,node);
    if (_siftup(list,0))
      return NULL;
  } else {
    /* The heap had exactly one element; it is the result. */
    returnNode = node;
  }
  return returnNode;
}
/* libheapq.heapreplace(list, node): pop and return the root item and
   push node in its place with a single sift.  Raises IndexError on an
   empty list; returns NULL on error. */
static PyObject *
heapreplace(PyObject *self, PyObject *args) {
  PyObject *list = NULL;
  PyObject *node = NULL;
  PyObject *returnNode = NULL;
  int len;
  if (!PyArg_ParseTuple(args,"O!O",&PyList_Type,&list,&node))
    return NULL;
  len = PyList_Size(list);
  if (len == 0) {
    /* Special-case most common failure cause */
    PyErr_SetString(PyExc_IndexError, "replace on an empty list");
    return NULL;
  }
  /* Take the root (new reference), overwrite it with node, re-heap.
     NOTE(review): returnNode is leaked if _siftup fails. */
  returnNode = PySequence_GetItem(list,0);
  PySequence_SetItem(list,0,node);
  if (_siftup(list,0))
    return NULL;
  return returnNode;
}
/* libheapq.heapify(list): rearrange the list into heap order in
   place, sifting each interior node from the last one up to the
   root.  Returns None, or NULL on error. */
static PyObject *
heapify(PyObject *self, PyObject *args) {
  int n, i;
  PyObject *list;
  if (!PyArg_ParseTuple(args,"O!",&PyList_Type,&list))
    return NULL;
  /* Index of the last node that has at least one child. */
  n = (PyList_Size(list)/2)-1;
  for (i=n;i>=0;i--) {
    if (_siftup(list,i))
      return NULL;
  }
  Py_INCREF(Py_None);
  return Py_None;
}
/* Move list[pos] up toward the root (but not above startpos) until
   its "wakeTime" attribute is >= its parent's.  This is the
   hard-coded Task comparison mentioned in the file header; the
   generic rich-compare version is left commented out below.  Returns
   0 on success. */
static int
_siftdown(PyObject *list, int startpos, int pos) {
  PyObject *newitem, *parent;
  int parentpos;
  newitem = PySequence_GetItem(list,pos);  /* new reference */
  /* Cache newitem.wakeTime once.  A missing attribute is treated as
     0.0 -- NOTE(review): the pending AttributeError is left set in
     that case; confirm that is intended. */
  PyObject *newitem_wakeTime_obj = PyObject_GetAttrString(newitem, "wakeTime");
  double newitem_wakeTime = 0.0;
  if (newitem_wakeTime_obj != NULL) {
    newitem_wakeTime = PyFloat_AS_DOUBLE(newitem_wakeTime_obj);
    Py_DECREF(newitem_wakeTime_obj);
  }
  while (pos > startpos) {
    parentpos = (pos - 1) >> 1;
    parent = PyList_GetItem(list,parentpos);  /* borrowed reference */
    /*
    cmp = PyObject_RichCompareBool(parent,newitem,Py_LE);
    if (cmp > 0)
    break;
    else if (cmp < 0)
    return -1;
    */
    PyObject *parent_wakeTime_obj = PyObject_GetAttrString(parent, "wakeTime");
    double parent_wakeTime = 0.0;
    if (parent_wakeTime_obj != NULL) {
      parent_wakeTime = PyFloat_AS_DOUBLE(parent_wakeTime_obj);
      Py_DECREF(parent_wakeTime_obj);
    }
    if (parent_wakeTime <= newitem_wakeTime) {
      break;
    }
    /* Shift the parent down one level.  PyList_SetItem steals a
       reference, so balance it with an INCREF on the borrowed parent. */
    Py_INCREF(parent);
    PyList_SetItem(list,pos,parent);
    pos = parentpos;
  }
  /* Drop newitem into its final slot (its new reference is stolen). */
  PyList_SetItem(list,pos,newitem);
  return 0;
}
/* Move the item at pos down the tree, repeatedly promoting its
   smaller-wakeTime child into the vacated slot, then sift the
   displaced item back up with _siftdown.  Returns 0 on success, or
   _siftdown's error result. */
static int
_siftup(PyObject *list, int pos) {
  PyObject *newitem, *right, *child;
  int endpos, rightpos, childpos;
  int startpos = pos;
  endpos = PyList_Size(list);
  newitem = PySequence_GetItem(list,pos);  /* new reference */
  childpos = (2*pos)+1;  /* left child */
  while (childpos < endpos) {
    rightpos = childpos + 1;
    child = PySequence_Fast_GET_ITEM(list,childpos);  /* borrowed */
    PyObject *child_wakeTime_obj = PyObject_GetAttrString(child, "wakeTime");
    double child_wakeTime = 0.0;
    if (child_wakeTime_obj != NULL) {
      child_wakeTime = PyFloat_AS_DOUBLE(child_wakeTime_obj);
      Py_DECREF(child_wakeTime_obj);
    }
    if (rightpos < endpos) {
      right = PySequence_Fast_GET_ITEM(list,rightpos);  /* borrowed */
      PyObject *right_wakeTime_obj = PyObject_GetAttrString(right, "wakeTime");
      double right_wakeTime = 0.0;
      if (right_wakeTime_obj != NULL) {
        right_wakeTime = PyFloat_AS_DOUBLE(right_wakeTime_obj);
        Py_DECREF(right_wakeTime_obj);
      }
      /*
      cmp = PyObject_RichCompareBool(right,child,Py_LE);
      if (cmp > 0)
      childpos = rightpos;
      else if (cmp < 0)
      return -1;
      */
      /* Prefer the right child when it is not larger. */
      if (right_wakeTime <= child_wakeTime) {
        childpos = rightpos;
      }
    }
    /* Promote the chosen child.  It is re-fetched with
       PySequence_GetItem so PyList_SetItem has a new reference to
       steal. */
    child = PySequence_GetItem(list,childpos);
    PyList_SetItem(list,pos,child);
    pos = childpos;
    childpos = (2*pos)+1;
  }
  /* The vacated leaf slot gets newitem (reference stolen), then
     bubble it up to its final position. */
  PyList_SetItem(list,pos,newitem);
  return _siftdown(list,startpos,pos);
}
/* Method table, shared by both the libheapq and libp3heapq module
   names below. */
static PyMethodDef heapqcMethods[] = {
  {"heappush",heappush,METH_VARARGS},
  {"heappop",heappop,METH_VARARGS},
  {"heapreplace",heapreplace,METH_VARARGS},
  {"heapify",heapify,METH_VARARGS},
  {NULL, NULL} /* Sentinel */
};
/* Python 2 module entry point for "import libheapq". */
void initlibheapq(void) {
  (void) Py_InitModule("libheapq", heapqcMethods);
};
/* Python 2 module entry point for "import libp3heapq" (same methods). */
void initlibp3heapq(void) {
  (void) Py_InitModule("libp3heapq", heapqcMethods);
};

View File

@@ -3,7 +3,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
from pandac.PandaModules import VirtualFileSystem
from pandac.PandaModules import Filename
from pandac.PandaModules import DSearchPath
from direct.showbase import ElementTree as ET
import xml.etree.ElementTree as ET
import LandingPageHTML
from StringIO import StringIO

View File

@@ -1,6 +1,6 @@
# -- Text content for the landing page. You should change these for yours! --
from direct.showbase import ElementTree as ET
import xml.etree.ElementTree as ET
title = "Landing Page"
defaultTitle = title

View File

@@ -4,7 +4,7 @@ from direct.directnotify.DirectNotifyGlobal import directNotify
from direct.task.TaskManagerGlobal import taskMgr
from direct.task import Task
from LandingPage import LandingPage
from direct.showbase import ElementTree as ET
import xml.etree.ElementTree as ET
notify = directNotify.newCategory('WebRequestDispatcher')

View File

@@ -1,504 +0,0 @@
import string
import pprint
import sys
import os
import ConfigParser
import pprint
import shutil
import tempfile
import ltoc
import tocfilter
import resource
import archive
import archivebuilder
import carchive
# Module-level builder state, filled in by the driving script.
logfile = None   # verbose log stream, written by Target.dump()/build()/gather()
autopath = []    # path prefixes prepended to every target's pathprefix
built = {}       # section name -> built Target (consulted in CollectTarget.gather)
copyFile = None  # NOTE(review): not referenced in this chunk -- confirm its use
class Target:
    """Base class for one buildable target, configured from one section of
    a ConfigParser file.  Subclasses override edit/gather/assemble.

    NOTE(review): indentation reconstructed from a whitespace-stripped
    diff rendering -- verify nesting against the original file.
    """
    def __init__(self, cfg, sectnm, cnvrts):
        # cfg: ConfigParser instance; sectnm: the section to read;
        # cnvrts: maps option name -> converter method name on self
        # (default converter is getstringlist).
        self.children = []
        self._dependencies = ltoc.lTOC() # the stuff an outer package will need to use me
        self.cfg = cfg
        self.__name__ = 'joe'
        # Turn every config option into an instance attribute, run
        # through its converter.
        for optnm in cfg.options(sectnm):
            cnvrt = cnvrts.get(optnm, 'getstringlist')
            if cnvrt:
                f = getattr(self, cnvrt, None)
                if f:
                    self.__dict__[optnm] = f(cfg.get(sectnm, optnm))
        if not hasattr(self, 'name'):
            self.name = self.__name__
        print "Initializing", self.__name__
        self.pathprefix = autopath + self.pathprefix
        self.pathprefix.append(os.path.join(pyinsthome, 'support'))
        # Every zlib entry must name another config section.
        for z in self.zlib:
            if z in self.cfg.sections():
                self.children.append(z)
            else:
                raise ValueError, "%s - zlib '%s' does not refer to a sections" \
                      % (self.name, z)
        # Promote misc entries that are PYZ sections into zlib.
        for i in range(len(self.misc)):
            x = self.misc[i]
            if x in self.cfg.sections():
                if self.cfg.get(x, "type") == 'PYZ':
                    self.zlib.append(x)
                    self.misc[i] = None
                self.children.append(x)
        self.misc = filter(None, self.misc)
        self.edit()
        # Build the table of contents and install the exclusion filters.
        self.toc = ltoc.lTOC()
        for thingie in self.excludes:
            try:
                fltr = tocfilter.makefilter(thingie, self.pathprefix)
            except ValueError:
                print "Warning: '%s' not found - no filter created" % thingie
            else:
                self.toc.addFilter(fltr)
        if self.exstdlib:
            self.toc.addFilter(tocfilter.StdLibFilter())
        if self.extypes:
            self.toc.addFilter(tocfilter.ExtFilter(self.extypes))
        if self.expatterns:
            self.toc.addFilter(tocfilter.PatternFilter(self.expatterns))
    ##------utilities------##
    def dump(self):
        # Dump this target's full state to the build log for debugging.
        logfile.write("---- %s: %s -----\n" % (self.__class__.__name__, self.name))
        pprint.pprint(self.__dict__, logfile)
    def getstringlist(self, opt):
        # Converter: "a, b, c" -> ['a', 'b', 'c'] (empty items dropped).
        tmp = string.split(opt, ',')
        return filter(None, map(string.strip, tmp))
    def getstring(self, opt):
        # Converter: pass the raw option string through unchanged.
        return opt
    def getbool(self, opt):
        # Converter: '0'/'f'/'F'/'n'/'N' -> 0, anything else -> 1.
        if opt in ('0','f','F','n','N'):
            return 0
        return 1
    ##-----framework-----##
    def build(self):
        # Template method: gather components, log the final TOC, assemble.
        print "Gathering components of %s" % self.name
        self.gather()
        logfile.write("Final Table of Contents for %s:\n" % self.name)
        pprint.pprint(self.toc.toList(), logfile)
        print "Creating %s" % self.name
        self.assemble()
    ##-----overrideables-----##
    def edit(self):
        # Hook: adjust configuration after option parsing.
        pass
    def gather(self):
        # Hook: populate self.toc with this target's contents.
        pass
    def assemble(self):
        # Hook: write the built artifact.
        pass
class PYZTarget(Target):
    """Target that packs compiled Python modules into a ZlibArchive.

    NOTE(review): indentation reconstructed from a whitespace-stripped
    diff rendering -- verify nesting against the original file.
    """
    def __init__(self, cfg, sectnm, cnvrts):
        Target.__init__(self, cfg, sectnm, cnvrts)
        # to use a PYZTarget, you'll need imputil and archive
        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'imputil.py')])
        print "pyinsthome:", pyinsthome
        imputil = resource.makeresource('imputil.py', [pyinsthome])
        self._dependencies.append(imputil)
        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'archive_rt.py')])
        archmodule = resource.makeresource('archive_rt.py', [pyinsthome])
        self._dependencies.merge(archmodule.dependencies())
        self._dependencies.append(archmodule)
        # The runtime support modules must not also land inside the
        # archive itself, so register them as TOC filters.
        self.toc.addFilter(archmodule)
        self.toc.addFilter(imputil)
        for mod in archmodule.modules:
            self.toc.addFilter(mod)
    def edit(self):
        # extypes makes no sense for a pure-Python archive; warn only.
        if self.extypes:
            print "PYZ target %s ignoring extypes = %s" % (self.__name__, self.extypes)
    def gather(self):
        # Expand the scripts listed in 'dependencies' into their imported
        # module sets, then fold in explicit includes/directories/packages,
        # and finally keep only Python sources/bytecode.
        for script in self.dependencies:
            rsrc = resource.makeresource(script, self.pathprefix)
            if not isinstance(rsrc, resource.scriptresource):
                print "Bug alert - Made %s from %s!" % (rsrc, script)
            self.toc.merge(rsrc.modules)
        logfile.write("lTOC after expanding 'depends':\n")
        pprint.pprint(self.toc.toList(), logfile)
        for thingie in self.includes + self.directories + self.packages:
            rsrc = resource.makeresource(thingie, self.pathprefix)
##            if not isinstance(rsrc, resource.pythonresource):
##                print "PYZ target %s ignoring include %s" % (self.name, thingie)
##            else:
            self.toc.merge(rsrc.contents())
        logfile.write("lTOC after includes, dir, pkgs:\n")
        pprint.pprint(self.toc.toList(), logfile)
        self.toc.addFilter(tocfilter.ExtFilter(['.py', '.pyc', '.pyo'], 1))
        logfile.write("Applying the following filters:\n")
        pprint.pprint(self.toc.filters, logfile)
        self.toc.filter()
    def assemble(self):
        # Only write an archive if anything survived the filtering.
        contents = self.toc.toList()
        if contents:
            lib = archive.ZlibArchive()
            lib.build(self.name, archivebuilder.GetCompiled(self.toc.toList()))
class CollectTarget(Target):
    """Collects binaries, zlib archives, scripts, misc sections and
    whole trees into a distribution directory."""
    def __init__(self, cfg, sectnm, cnvrts):
        Target.__init__(self, cfg, sectnm, cnvrts)
    # Maps a config section 'type' to the resource class wrapping its
    # output file; anything else is treated as plain data.
    _rsrcdict = {'COLLECT': resource.dirresource, 'PYZ': resource.zlibresource, 'CARCHIVE': resource.archiveresource}
    def gather(self):
        if self.support:
            # the bare minimum
            self.toc.merge([resource.makeresource('python20.dll')])
            self.toc.merge([resource.makeresource('exceptions.pyc').asBinary()])
        # zlib, bindepends, misc, trees, destdir
        for i in range(len(self.zlib)):
            # z refers to the section name
            z = self.zlib[i]
            nm = self.cfg.get(z, 'name')
            try:
                self.toc.merge([resource.makeresource(nm, ['.'])])
            except ValueError:
                # zlibs aren't written if they turn out to be empty
                self.zlib[i] = None
        self.zlib = filter(None, self.zlib)
        if self.zlib:
            # pull in the runtime support files the first zlib recorded
            target = built.get(self.zlib[0], None)
            if target:
                self.toc.merge(target._dependencies)
        for script in self.bindepends:
            rsrc = resource.makeresource(script, self.pathprefix)
            self.toc.merge(rsrc.binaries)
        logfile.write('ltoc after bindepends:\n')
        pprint.pprint(self.toc.toList(), logfile)
        for thingie in self.misc:
            if thingie in self.cfg.sections():
                # a reference to another target's output
                name = self.cfg.get(thingie, "name")
                typ = self.cfg.get(thingie, "type")
                klass = self._rsrcdict.get(typ, resource.dataresource)
                rsrc = apply(klass, (name, name))
                #now make sure we have the stuff the resource requires
                target = built.get(thingie, None)
                if target:
                    self.toc.merge(target._dependencies)
            else:
                rsrc = resource.makeresource(thingie, self.pathprefix)
            self.toc.merge(rsrc.contents())
        logfile.write('ltoc after misc:\n')
        pprint.pprint(self.toc.toList(), logfile)
        for script in self.script:
            if string.find(script, '.') == -1:
                script = script + '.py'
            rsrc = resource.makeresource(script, self.pathprefix)
            if rsrc.typ == 'm':
                # promote plain module to script type
                rsrc.typ = 's'
            self.toc.merge([rsrc])
        logfile.write('ltoc after scripts:\n')
        pprint.pprint(self.toc.toList(), logfile)
        for tree in self.trees:
            try:
                rsrc = resource.treeresource('.', tree)
            except ValueError:
                print "tree %s not found" % tree
            else:
                self.toc.merge(rsrc.contents())
        logfile.write('ltoc after trees:\n')
        pprint.pprint(self.toc.toList(), logfile)
        # drop directory entries (type 'd') from the final TOC
        self.toc.addFilter(tocfilter.TypeFilter(['d']))
        logfile.write("Applying the following filters:\n")
        pprint.pprint(self.toc.filters, logfile)
        self.toc.filter()
        #don't dupe stuff in a zlib that's part of this target
        if self.zlib:
            ztoc = ltoc.lTOC()
            for zlibnm in self.zlib:
                target = built.get(zlibnm, None)
                if target:
                    ztoc.merge(target.toc)
            # iterate backwards so deletion doesn't shift pending indices
            for i in range(len(self.toc)-1, -1, -1):
                rsrc = self.toc[i]
                if isinstance(rsrc, resource.moduleresource) and rsrc in ztoc:
                    del self.toc[i]
    def assemble(self):
        """Empty (or create) the output directory, copy every TOC entry
        into it, and generate a site.py that installs import hooks for
        any zlib archives that were copied."""
        if os.path.exists(self.name):
            if os.path.isdir(self.name):
                for fnm in os.listdir(self.name):
                    try:
                        os.remove(os.path.join(self.name, fnm))
                    except:
                        print "Could not delete file %s" % os.path.join(self.name, fnm)
        else:
            os.makedirs(self.name)
        mysite = []
        for nm, path, typ in self.toc.toList():
            shutil.copy2(path, self.name)
            if typ == 'z':
                mysite.append('imputil.FuncImporter(archive.ZlibArchive("%s", 0).get_code).install()' % nm)
        if mysite:
            mysite.insert(0, 'import archive, imputil')
            open(os.path.join(self.name, 'site.py'),'w').write(string.join(mysite, '\n'))
class ArchiveTarget(CollectTarget):
    """Collects like CollectTarget but assembles everything into a
    single CArchive file instead of a directory."""
    # use the basename of the source file as the archive entry name
    usefullname = 1
    def __init__(self, cfg, sectnm, cnvrts):
        CollectTarget.__init__(self, cfg, sectnm, cnvrts)
        # the extracted archive needs carchive_rt at runtime
        archivebuilder.GetCompiled([os.path.join(pyinsthome, 'carchive_rt.py')])
        carchmodule = resource.makeresource('carchive_rt.py', [pyinsthome])
        self._dependencies.merge(carchmodule.dependencies())
        self._dependencies.append(carchmodule)
    def edit(self):
        # a single-file archive has no destination directory
        if self.destdir:
            print "Warning 'destdir = %s' ignored for %s" % (self.destdir, self.name)
    def gather(self):
        CollectTarget.gather(self)
    # per-entry-type zlib compression level passed to CArchive
    # (s=script, m=module, b=binary, x=?, a=archive, z=zlib, p=?)
    _cdict = {'s':2,'m':1,'b':1,'x':1,'a':0,'z':0, 'p':1}
    def assemble(self, pkgnm=None):
        """Build the CArchive named PKGNM (default: self.name) from the
        gathered TOC; Python modules are compiled first. Returns the
        CArchive object."""
        if pkgnm is None:
            pkgnm = self.name
        arch = carchive.CArchive()
        toc = []
        pytoc = []
        for nm, path, typ in self.toc.toList():
            compress = self._cdict[typ]
            if typ == 'b' or (self.usefullname and typ in 'ms'):
                nm = os.path.basename(path)
            if typ == 'm':
                # modules go through the compile step below
                pytoc.append((nm, path, compress, typ))
            else:
                toc.append((nm, path, compress, typ))
        toc = toc + archivebuilder.GetCompiled(pytoc)
        arch.build(pkgnm, toc)
        return arch
class FullExeTarget(ArchiveTarget):
    """Builds a self-contained executable: a CArchive appended to one of
    the Run*.exe stub loaders, optionally with a custom icon."""
    usefullname = 0
    def __init__(self, cfg, sectnm, cnvrts):
        ArchiveTarget.__init__(self, cfg, sectnm, cnvrts)
    def gather(self):
        for script in self.script:
            #print "FullExeTarget.gather: script is", repr(script)
            rsrc = resource.makeresource(script, self.pathprefix)
            rsrc = resource.scriptresource(rsrc.name, rsrc.path)
            #print " resource is", repr(rsrc)
            self.toc.merge(rsrc.binaries)
        ArchiveTarget.gather(self)
        if not self.zlib:
            # NOTE(review): 'rsrc' here is the loop variable left over from
            # the for above, i.e. only the LAST script's modules are merged
            # when no zlib is configured — confirm this was intended.
            self.toc.merge(rsrc.modules)
        # an exe is self-contained: nothing downstream depends on support files
        self._dependencies = ltoc.lTOC()
    # compression levels differ from ArchiveTarget: modules/extensions
    # are stored uncompressed inside the exe
    _cdict = {'s':2,'m':0,'b':1,'x':0,'a':0,'z':0}
    # stub loader selected by (userunw, debug): windowed vs console,
    # debug vs release
    _edict = { (1, 1):'Runw_d.exe', (1, 0):'Runw.exe', (0, 1):'Run_d.exe', (0, 0):'Run.exe'}
    def assemble(self):
        """Concatenate the stub loader and the packed archive into
        self.name, applying the configured icon if possible."""
        pkgname = tempfile.mktemp()
        arch = ArchiveTarget.assemble(self, pkgname)
        exe = self._edict[(self.userunw, self.debug)]
        exe = os.path.normpath(os.path.join(pyinsthome, 'support', exe))
        ## copyFile([exe, pkgname], self.name)
        ## os.remove(pkgname)
        # Thomas Heller's icon code
        # my version
        if self.icon:
            # patch the icon into a temp copy of the stub first
            myexe = tempfile.mktemp()
            copyFile (exe, myexe)
            try:
                from icon import CopyIcons
                CopyIcons(myexe, self.icon)
            except ImportError:
                print "win32api is required for updating icons"
                print "You should have win32api.pyd and PyWinTypes20.dll"
                print "in the installation directory."
                print "Please copy them to Python's DLLS subdirectory"
                print "(or install Mark Hammond's Win32 extensions)."
            ## iconfile = None
            ## for name in self.cfg.sections():
            ##     if self.cfg.get (name, "type") == "STANDALONE":
            ##         try:
            ##             iconfile = self.cfg.get (name, "iconfile")
            ##         except:
            ##             pass
            ## if iconfile:
            ##     from icon import CopyIcons
            ##     CopyIcons (myexe, iconfile)
            copyFile ([myexe, pkgname], self.name)
            os.remove(myexe)
        else:
            copyFile([exe, pkgname], self.name)
        #os.remove(pkgname)
class ExeTarget(FullExeTarget):
    """A 'STANDALONE' exe: like FullExeTarget, but binary dependencies
    are left OUT of the exe and recorded as dependencies for a
    surrounding COLLECT/INSTALL target to ship alongside it."""
    def __init__(self, cfg, sectnm, cnvrts):
        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
    def edit(self):
        if not self.script:
            raise ValueError, "EXE target %s requires 'script= <script>'" % self.__name__
    def gather(self):
        FullExeTarget.gather(self)
        # move binaries out of the TOC into the dependency list;
        # iterate backwards so deletion doesn't shift pending indices
        for i in range(len(self.toc)-1, -1, -1):
            rsrc = self.toc[i]
            if rsrc.typ == 'b':
                self._dependencies.append(rsrc)
                del self.toc[i]
# Source-text fragments used to generate install/bootstrap scripts.
# installpreamble is the head of the generated 'gen_install.py'.
# BUGFIX: the suffix test was written as me[:-4] (which *strips* the
# last four characters) instead of me[-4:], so '.exe' was appended to
# names that already had it.
installpreamble = """\
import sys, os
import installutils
import carchive_rt
idir = installutils.getinstalldir()
me = sys.argv[0]
if me[-4:] != '.exe':
    me = me + '.exe'
this = carchive_rt.CArchive(sys.argv[0])
here = sys.path[0]
"""
# One generated line per binary: copy from the unpack dir to the install dir.
mvfile = "installutils.copyFile(os.path.join(here, '%s'), os.path.join(idir, '%s'))\n"
# One generated line per archived file: extract it from the CArchive.
extractfile = "open(os.path.join(idir, '%s'), 'wb').write(this.extract('%s')[1])\n"
# Head of a generated site.py that bootstraps zlib-archive imports.
sitepreamble = """\
import archive_rt
import imputil
import sys
"""
# One generated line per zlib archive: install an import hook for it.
importzlib = "imputil.FuncImporter(archive_rt.ZlibArchive(sys.path[0]+'/%s').get_code).install()\n"
class InstallTarget(FullExeTarget):
    """A self-extracting installer exe: if no script is configured, a
    'gen_install.py' is generated that copies/extracts every gathered
    resource into the install directory."""
    def __init__(self, cfg, sectnm, cnvrts):
        FullExeTarget.__init__(self, cfg, sectnm, cnvrts)
    def edit(self):
        if not self.script:
            # start the generated installer from the canned preamble
            open('gen_install.py', 'w').write(installpreamble)
            self.script = ['gen_install.py']
    def gather(self):
        FullExeTarget.gather(self)
        if self.script[0] == 'gen_install.py':
            # append one install action per gathered resource
            f = open(self.script[0], 'a')
            for rsrc in self.toc:
                if isinstance(rsrc, resource.binaryresource):
                    nm = os.path.basename(rsrc.path)
                    f.write(mvfile % (nm, nm))
                elif isinstance(rsrc, resource.pythonresource):
                    pass
                elif isinstance(rsrc, resource.zlibresource):
                    pass
                else:
                    f.write(extractfile % (rsrc.name, rsrc.name))
                    # NOTE(review): nesting reconstructed — the install-script
                    # append is taken to apply only to extracted archives.
                    if isinstance(rsrc, resource.archiveresource):
                        #did it come with an install script?
                        target = built.get(rsrc.name, None)
                        if target:
                            if hasattr(target, "installscript"):
                                for script in target.installscript:
                                    s = resource.makeresource(script, self.pathprefix)
                                    txt = open(s.path, 'r').read()
                                    f.write(txt)
            f.close()
# Map a config section's 'type' option to the Target subclass that
# implements it (see makeTarget).
dispatch = {
    'PYZ': PYZTarget,
    'CARCHIVE': ArchiveTarget,
    'COLLECT': CollectTarget,
    'STANDALONE': ExeTarget,
    'INSTALL': InstallTarget,
    'FULLEXE': FullExeTarget,
}
def makeTarget(cfg, section):
    """Instantiate the Target subclass named by SECTION's 'type' option."""
    klass = dispatch[cfg.get(section, 'type')]
    return klass(cfg, section, optcnvrts)
# Default values for every recognized config option; fed to
# ConfigParser so each section sees the full option set.
optdefaults = { 'type':'PYZ',
                'script':'', # INSTALL (opt) & STANDALONE (required)
                'zlib':'', # INSTALL, STANDALONE, COLLECT
                'bindepends':'', # INSTALL, COLLECT
                'misc':'', # INSTALL. COLLECT
                'includetk': '0', # INSTALL, COLLECT
                'userunw': '0', # STANDALONE
                'dependencies':'', # PYZ
                'directories':'', # PYZ
                'excludes':'', # PYZ, INSTALL, COLLECT
                'expatterns': '',
                'exstdlib': '0',
                'extypes': '',
                'includes':'', # PYZ
                'packages':'', # PYZ
                'destdir':'', # COLLECT
                'pathprefix': '',
                'trees': '',
                'debug': '0',
                'support': '1', # include python20.dll & exceptons.pyc at a minimum
                'icon': '',
              }
# Per-option converter method name on Target ('' = skip the option);
# options not listed here fall back to getstringlist (see Target).
optcnvrts = { 'type':'',
              'name': 'getstring',
              'exstdlib': 'getbool',
              'console': 'getbool',
              'analyze': 'getbool',
              'debug': 'getbool',
              'includetk': 'getbool',
              'userunw': 'getbool',
              'destdir': 'getstring',
              'support': 'getbool',
              '__name__': 'getstring',
              'icon': 'getstring',
            }
def main(opts, args):
    """Read the config files named in ARGS, create a Target per section,
    and build them in dependency order.

    OPTS (getopt-style pairs) is currently unused. Raises RuntimeError
    on circular dependencies between sections.
    """
    global pyinsthome
    global copyFile
    pyinsthome = os.path.abspath(os.path.dirname(sys.argv[0]))
    # sys.path.insert(0, os.path.join(pyinsthome, 'support'))
    import installutils
    copyFile = installutils.copyFile
    global logfile
    logfile = open('Builder.log','w')
    targets = []
    xref = {}
    cfg = ConfigParser.ConfigParser(optdefaults)
    # let resources be found relative to each config file's directory
    for arg in args:
        dirnm = os.path.dirname(arg)
        if dirnm == '':
            dirnm = '.'
        autopath.append(os.path.abspath(dirnm))
    cfg.read(args)
    for section in cfg.sections():
        target = makeTarget(cfg, section)
        targets.append(target)
        xref[section] = target
    # repeatedly build any target whose children are all built already
    while targets:
        for i in range(len(targets)):
            target = targets[i]
            for child in target.children:
                if xref[child] in targets:
                    break
            else: #no break - ready to build
                target.dump()
                target.build()
                # register under both section name and output name
                built[target.__name__] = target
                built[target.name] = target
                targets[i] = None
                break
        else: #no break - couldn't find anything to build
            names = map(lambda x: getattr(x, 'name'), targets)
            raise RuntimeError, "circular dependencies in %s" % repr(names)
        targets = filter(None, targets)
def run(file):
    """Programmatic entry point: build from a config file.

    FILE may be a single filename or a list of filenames. A bare string
    is wrapped in a list before being handed to main(): previously it
    was passed through unchanged, so main()'s per-argument loop iterated
    over the *characters* of the filename. (ConfigParser.read happened
    to accept the bare string, which masked the bug.)
    """
    if type(file) == type(''):
        file = [file]
    main([], file)
if __name__ == '__main__':
    import getopt
    # -d / -v are parsed but currently unused by main()
    (opts, args) = getopt.getopt(sys.argv[1:], 'dv')
    print "opts:", opts
    print "args:", args
    main(opts, args)

View File

@@ -1,246 +0,0 @@
#
# Gordon McMillan (as inspired and influenced by Greg Stein)
#
# subclasses may not need marshal or struct, but since they're
# builtin, importing is safe.
#
# While an Archive is really an abstraction for any "filesystem
# within a file", it is tuned for use with imputil.FuncImporter.
# This assumes it contains python code objects, indexed by the
# the internal name (ie, no '.py').
# See carchive.py for a more general archive (contains anything)
# that can be understood by a C program.
import marshal
import struct
class Archive:
    """ A base class for a repository of python code objects.
    The get_code method is used by imputil.FuntionImporter
    to get code objects by name.
    Archives are flat namespaces, so conflict between module
    names in different packages are possible. Use a different
    Archive for each package.
    """
    # On-disk layout: MAGIC, Python bytecode magic, int TOC position,
    # then entries, then the marshalled TOC.
    MAGIC = 'PYL\0'
    HDRLEN = 12 # default is MAGIC followed by python's magic, int pos of toc
    TOCPOS = 8
    TRLLEN = 0 # default - no trailer
    TOCTMPLT = {} #
    # lazily-bound os module (imported on first add(); see add())
    os = None
    def __init__(self, path=None, start=0):
        """
        Initialize an Archive. If path is omitted, it will be an empty Archive.
        start is the seek position within path where the Archive starts."""
        self.toc = None
        self.path = path
        self.start = start
        import imp
        self.pymagic = imp.get_magic()
        if path is not None:
            self.lib = open(self.path, 'rb')
            self.checkmagic()
            self.loadtoc()
    ####### Sub-methods of __init__ - override as needed #############
    def checkmagic(self):
        """Verify version and validity of file.
        Overridable.
        Check to see if the file object self.lib actually has a file
        we understand.
        """
        self.lib.seek(self.start) #default - magic is at start of file
        if self.lib.read(len(self.MAGIC)) != self.MAGIC:
            raise RuntimeError, "%s is not a valid %s archive file" \
                  % (self.path, self.__class__.__name__)
        if self.lib.read(len(self.pymagic)) != self.pymagic:
            raise RuntimeError, "%s has version mismatch to dll" % (self.path)
    def loadtoc(self):
        """Load the table of contents.
        Overridable.
        Default: After magic comes an int (4 byte native) giving the
        position of the TOC within self.lib.
        Default: The TOC is a marshal-able string.
        """
        self.lib.seek(self.start + self.TOCPOS)
        (offset,) = struct.unpack('=i', self.lib.read(4))
        self.lib.seek(self.start + offset)
        self.toc = marshal.load(self.lib)
    ######## This is what is called by FuncImporter #######
    ## Since an Archive is flat, we ignore parent and modname.
    def get_code(self, parent, modname, fqname):
        """The import hook.
        Called by imputil.FunctionImporter.
        Override extract to tune getting code from the Archive."""
        rslt = self.extract(fqname) # None if not found, (ispkg, code) otherwise
        if rslt is None:
            return None
        ispkg, code = rslt
        if ispkg:
            # packages need an (empty) __path__ so submodule imports work
            return ispkg, code, {'__path__': []}
        return rslt
    ####### Core method - Override as needed #########
    def extract(self, name):
        """ Get the object corresponding to name, or None.
        NAME is the name as specified in an 'import name'.
        'import a.b' will become:
        extract('a') (return None because 'a' is not a code object)
        extract('a.__init__') (return a code object)
        extract('a.b') (return a code object)
        Default implementation:
        self.toc is a dict
        self.toc[name] is pos
        self.lib has the code object marshal-ed at pos
        """
        ispkg, pos = self.toc.get(name, (0, None))
        if pos is None:
            return None
        self.lib.seek(self.start + pos)
        return ispkg, marshal.load(self.lib)
    ########################################################################
    # Informational methods
    def contents(self):
        """Return a list of the contents.
        Default implementation assumes self.toc is a dict like object.
        """
        return self.toc.keys()
    ########################################################################
    # Building
    ####### Top level method - shouldn't need overriding #######
    def build(self, path, lTOC):
        """Create an archive file of name PATH from LTOC.
        lTOC is a 'logical TOC' - a list of (name, path, ...)
        where name is the internal (import) name,
        and path is a file to get the object from, eg './a.pyc'.
        """
        self.path = path
        self.lib = open(path, 'wb')
        #reserve space for the header
        if self.HDRLEN:
            self.lib.write('\0'*self.HDRLEN)
        #create an empty toc
        if type(self.TOCTMPLT) == type({}):
            self.toc = {}
        else: # assume callable
            self.toc = self.TOCTMPLT()
        for tocentry in lTOC:
            self.add(tocentry) # the guts of the archive
        tocpos = self.lib.tell()
        self.save_toc(tocpos)
        if self.TRLLEN:
            self.save_trailer(tocpos)
        if self.HDRLEN:
            self.update_headers(tocpos)
        self.lib.close()
    ####### manages keeping the internal TOC and the guts in sync #######
    def add(self, entry):
        """Add an entry to the archive.
        Override this to influence the mechanics of the Archive.
        Assumes entry is a seq beginning with (nm, pth, ...) where
        nm is the key by which we'll be asked for the object.
        pth is the name of where we find the object.
        """
        if self.os is None:
            import os
            self.os = os
        nm = entry[0]
        pth = entry[1]
        # a module file named __init__ marks a package
        ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
        self.toc[nm] = (ispkg, self.lib.tell())
        f = open(entry[1], 'rb')
        f.seek(8) #skip magic and timestamp
        self.lib.write(f.read())
    def save_toc(self, tocpos):
        """Save the table of contents.
        Default - toc is a dict
        Gets marshaled to self.lib
        """
        marshal.dump(self.toc, self.lib)
    def save_trailer(self, tocpos):
        """Placeholder for Archives with trailers."""
        pass
    def update_headers(self, tocpos):
        """Update any header data.
        Default header is MAGIC + Python's magic + tocpos"""
        self.lib.seek(self.start)
        self.lib.write(self.MAGIC)
        self.lib.write(self.pymagic)
        self.lib.write(struct.pack('=i', tocpos))
##############################################################
#
# ZlibArchive - an archive with compressed entries
#
class ZlibArchive(Archive):
    """A subclass of Archive that compresses entries with zlib
    and uses a (marshalled) dict as a table of contents"""
    MAGIC = 'PYZ\0'
    TOCPOS = 8
    HDRLEN = 12
    TRLLEN = 0
    TOCTMPLT = {}
    # maximum zlib compression
    LEVEL = 9
    def __init__(self, path=None, offset=0):
        Archive.__init__(self, path, offset)
        # dynamic import so not imported if not needed
        global zlib
        import zlib
    def extract(self, name):
        """Get the code object for NAME.
        Return None if name is not in the table of contents.
        Otherwise, return a tuple (ispkg, code)"""
        # TOC entries carry a third field: the compressed length
        (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
        if pos is None:
            return None
        self.lib.seek(self.start + pos)
        return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
    def add(self, entry):
        """Add an entry.
        ENTRY is a sequence where entry[0] is name and entry[1] is full path name.
        zlib compress the code object, and build a toc entry"""
        if self.os is None:
            import os
            self.os = os
        nm = entry[0]
        pth = entry[1]
        ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
        f = open(pth, 'rb')
        f.seek(8) #skip magic and timestamp
        obj = zlib.compress(f.read(), self.LEVEL)
        self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
        self.lib.write(obj)

View File

@@ -1,226 +0,0 @@
#
# Gordon McMillan (as inspired and influenced by Greg Stein)
#
# subclasses may not need marshal or struct, but since they're
# builtin, importing is safe.
#
# While an Archive is really an abstraction for any "filesystem
# within a file", it is tuned for use with imputil.FuncImporter.
# This assumes it contains python code objects, indexed by the
# the internal name (ie, no '.py').
# See carchive.py for a more general archive (contains anything)
# that can be understood by a C program.
#archive_rt is a stripped down version of MEInc.Dist.archive.
#It has had all building logic removed.
#It's purpose is to bootstrap the Python installation.
import marshal
import struct
class Archive:
""" A base class for a repository of python code objects.
The extract method is used by imputil.ArchiveImporter
to get code objects by name (fully qualified name), so
an enduser "import a.b" would become
extract('a.__init__')
extract('a.b')
"""
MAGIC = 'PYL\0'
HDRLEN = 12 # default is MAGIC followed by python's magic, int pos of toc
TOCPOS = 8
TRLLEN = 0 # default - no trailer
TOCTMPLT = {} #
os = None
def __init__(self, path=None, start=0):
"Initialize an Archive. If path is omitted, it will be an empty Archive."
self.toc = None
self.path = path
self.start = start
import imp
self.pymagic = imp.get_magic()
if path is not None:
self.lib = open(self.path, 'rb')
self.checkmagic()
self.loadtoc()
####### Sub-methods of __init__ - override as needed #############
def checkmagic(self):
""" Overridable.
Check to see if the file object self.lib actually has a file
we understand.
"""
self.lib.seek(self.start) #default - magic is at start of file
if self.lib.read(len(self.MAGIC)) != self.MAGIC:
raise RuntimeError, "%s is not a valid %s archive file" \
% (self.path, self.__class__.__name__)
if self.lib.read(len(self.pymagic)) != self.pymagic:
raise RuntimeError, "%s has version mismatch to dll" % (self.path)
def loadtoc(self):
""" Overridable.
Default: After magic comes an int (4 byte native) giving the
position of the TOC within self.lib.
Default: The TOC is a marshal-able string.
"""
self.lib.seek(self.start + self.TOCPOS)
(offset,) = struct.unpack('=i', self.lib.read(4))
self.lib.seek(self.start + offset)
self.toc = marshal.load(self.lib)
######## This is what is called by FuncImporter #######
## Since an Archive is flat, we ignore parent and modname.
def get_code(self, parent, modname, fqname):
print "parent: ", parent
print "modname: ", modname
print "fqname: ", fqname
return self.extract(fqname) # None if not found, (ispkg, code) otherwise
if rslt is None:
return None
ispkg, code = rslt
if ispkg:
return ispkg, code, {'__path__': []}
return rslt
####### Core method - Override as needed #########
def extract(self, name):
""" Get the object corresponding to name, or None.
For use with imputil ArchiveImporter, object is a python code object.
'name' is the name as specified in an 'import name'.
'import a.b' will become:
extract('a') (return None because 'a' is not a code object)
extract('a.__init__') (return a code object)
extract('a.b') (return a code object)
Default implementation:
self.toc is a dict
self.toc[name] is pos
self.lib has the code object marshal-ed at pos
"""
ispkg, pos = self.toc.get(name, (0, None))
if pos is None:
return None
self.lib.seek(self.start + pos)
return ispkg, marshal.load(self.lib)
########################################################################
# Informational methods
def contents(self):
"""Return a list of the contents
Default implementation assumes self.toc is a dict like object.
Not required by ArchiveImporter.
"""
return self.toc.keys()
########################################################################
# Building
####### Top level method - shouldn't need overriding #######
## def build(self, path, lTOC):
## """Create an archive file of name 'path'.
## lTOC is a 'logical TOC' - a list of (name, path, ...)
## where name is the internal name, eg 'a'
## and path is a file to get the object from, eg './a.pyc'.
## """
## self.path = path
## self.lib = open(path, 'wb')
## #reserve space for the header
## if self.HDRLEN:
## self.lib.write('\0'*self.HDRLEN)
##
## #create an empty toc
##
## if type(self.TOCTMPLT) == type({}):
## self.toc = {}
## else: # assume callable
## self.toc = self.TOCTMPLT()
##
## for tocentry in lTOC:
## self.add(tocentry) # the guts of the archive
##
## tocpos = self.lib.tell()
## self.save_toc(tocpos)
## if self.TRLLEN:
## self.save_trailer(tocpos)
## if self.HDRLEN:
## self.update_headers(tocpos)
## self.lib.close()
##
##
## ####### manages keeping the internal TOC and the guts in sync #######
## def add(self, entry):
## """Override this to influence the mechanics of the Archive.
## Assumes entry is a seq beginning with (nm, pth, ...) where
## nm is the key by which we'll be asked for the object.
## pth is the name of where we find the object. Overrides of
## get_obj_from can make use of further elements in entry.
## """
## if self.os is None:
## import os
## self.os = os
## nm = entry[0]
## pth = entry[1]
## ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
## self.toc[nm] = (ispkg, self.lib.tell())
## f = open(entry[1], 'rb')
## f.seek(8) #skip magic and timestamp
## self.lib.write(f.read())
##
## def save_toc(self, tocpos):
## """Default - toc is a dict
## Gets marshaled to self.lib
## """
## marshal.dump(self.toc, self.lib)
##
## def save_trailer(self, tocpos):
## """Default - not used"""
## pass
##
## def update_headers(self, tocpos):
## """Default - MAGIC + Python's magic + tocpos"""
## self.lib.seek(self.start)
## self.lib.write(self.MAGIC)
## self.lib.write(self.pymagic)
## self.lib.write(struct.pack('=i', tocpos))
##############################################################
#
# ZlibArchive - an archive with compressed entries
#
class ZlibArchive(Archive):
    # Runtime-only (read side) counterpart of archive.ZlibArchive:
    # extracts zlib-compressed, marshalled code objects. The building
    # half lives in archive.py.
    MAGIC = 'PYZ\0'
    TOCPOS = 8
    HDRLEN = 12
    TRLLEN = 0
    TOCTMPLT = {}
    # compression level used by the build side; unused when only reading
    LEVEL = 9
    def __init__(self, path=None, offset=0):
        Archive.__init__(self, path, offset)
        # dynamic import so not imported if not needed
        global zlib
        import zlib
    def extract(self, name):
        # Return (ispkg, code object) for NAME, or None if absent.
        # TOC entries carry a third field: the compressed length.
        (ispkg, pos, lngth) = self.toc.get(name, (0, None, 0))
        if pos is None:
            return None
        self.lib.seek(self.start + pos)
        return ispkg, marshal.loads(zlib.decompress(self.lib.read(lngth)))
## def add(self, entry):
## if self.os is None:
## import os
## self.os = os
## nm = entry[0]
## pth = entry[1]
## ispkg = self.os.path.splitext(self.os.path.basename(pth))[0] == '__init__'
## f = open(pth, 'rb')
## f.seek(8) #skip magic and timestamp
## obj = zlib.compress(f.read(), self.LEVEL)
## self.toc[nm] = (ispkg, self.lib.tell(), len(obj))
## self.lib.write(obj)
##

View File

@@ -1,81 +0,0 @@
# copyright 1999 McMillan Enterprises, Inc.
# license: use as you please. No warranty.
# Gordon McMillan gmcm@hypernet.com
#
# A collection of routines for building a logical Table Of Contents
# that Archive (subclasses) use to build themselves.
# A logical Table of Contents is a sequence, each element of which is
# a sequence, with at least 2 entries - "name" and "path".
import os
import string
import py_compile
def GetCompiled(seq, lvl='c'):
    """SEQ is a list of .py files, or a logical TOC.
    Return as .pyc or .pyo files (LVL) after ensuring their existence"""
    if len(seq) == 0:
        return seq
    rslt = []
    isTOC = 0
    # TOC entries are tuples (name, path, ...); plain entries are strings
    if type(seq[0]) == type(()):
        isTOC = 1
    for py in seq:
        if isTOC:
            (nm, fnm), rest = py[:2], py[2:]
        else:
            fnm = py
        fnm = os.path.splitext(fnm)[0] + '.py'
        cmpl = 1
        pyc = fnm + lvl
        if os.path.exists(pyc):
            # compare mtimes; recompile unless the bytecode is newer.
            # NOTE(review): equal timestamps trigger a recompile — harmless
            # but possibly unintended.
            pytm = long(os.stat(fnm)[8])
            ctm = long(os.stat(pyc)[8])
            if pytm < ctm:
                cmpl = 0
        if cmpl:
            py_compile.compile(fnm, pyc)
        if isTOC:
            rslt.append((nm, pyc)+rest)
        else:
            rslt.append(pyc)
    return rslt
import modulefinder
MF = modulefinder
import sys
def Dependencies(script):
    """Get a logical TOC directly from the dependencies of a script.
    The returned TOC does NOT contain the script.
    It does contain extension modules. Uses modulefinder."""
    rslt = []
    (dir, name) = os.path.split(script)
    # make the script's own directory importable first
    if dir:
        ppath = [os.path.normpath(dir)] + sys.path
    else:
        ppath = sys.path[:]
    mf = MF.ModuleFinder(ppath, 0)
    try:
        mf.run_script(script)
    except IOError:
        print " Script not found:", script
        return []
    del mf.modules['__main__']
    # drop builtins (no file on disk); safe to delete while iterating
    # because Python 2 items() returns a list snapshot
    for (k, v) in mf.modules.items():
        if v.__file__ is None:
            del mf.modules[k] # a builtin
    for (k, v) in mf.modules.items():
        #ispkg = os.path.basename(v.__file__) == '__init__.py'
        # absolutize paths modulefinder reported relative to cwd
        d = os.path.dirname(v.__file__)
        if not d:
            v.__file__ = os.path.join(os.getcwd(), v.__file__)
        #if ispkg:
        #    rslt.append(k+'.__init__', v.__file__)
        #else:
        rslt.append((k, v.__file__))
    return rslt

View File

@@ -1,169 +0,0 @@
# copyright 1999 McMillan Enterprises, Inc.
# license: use as you please. No warranty.
#
# use dumpbin.exe (if present) to find the binary
# dependencies of an extension module.
# if dumpbin not available, pick apart the PE hdr of the binary
# while this appears to work well, it is complex and subject to
# problems with changes to PE hdrs (ie, this works only on 32 bit Intel
# Windows format binaries)
#
# Note also that you should check the results to make sure that the
# dlls are redistributable. I've listed most of the common MS dlls
# under "excludes" below; add to this list as necessary (or use the
# "excludes" option in the INSTALL section of the config file).
import os
import time
import string
import sys
import tempfile
import finder
# Upper-cased names already analyzed by Dependencies() (name -> 1).
seen = {}
# System / non-redistributable Windows dlls that must never be pulled
# into a distribution; extend via the "excludes" option in the INSTALL
# section of the config file.
excludes = {'KERNEL32.DLL':1,
            'ADVAPI.DLL':1,
            'MSVCRT.DLL':1,
            'ADVAPI32.DLL':1,
            'COMCTL32.DLL':1,
            'CRTDLL.DLL':1,
            'GDI32.DLL':1,
            'MFC42.DLL':1,
            'NTDLL.DLL':1,
            'OLE32.DLL':1,
            'OLEAUT32.DLL':1,
            'RPCRT4.DLL':1,
            'SHELL32.DLL':1,
            'USER32.DLL':1,
            'WINSPOOL.DRV':1,
            'WS2HELP.DLL':1,
            'WS2_32.DLL':1,
            'WSOCK32.DLL':1,
            'WINMM.DLL':1,
            'COMDLG32.DLL':1,
            'ZLIB.DLL':1,
            'ODBC32.DLL':1,
            'VERSION.DLL':1}
def getfullnameof(mod, xtrapath = None):
    """Return the full path name of MOD.
    MOD is the basename of a dll or pyd.
    XTRAPATH is a path or list of paths to search first.
    Return the full path name of MOD.
    Will search the full Windows search path, as well as sys.path

    Returns '' when MOD cannot be found anywhere."""
    epath = finder.getpath()
    # python extensions may also live on sys.path
    if mod[-4:] in ('.pyd', '.PYD'):
        epath = epath + sys.path
    if xtrapath is not None:
        if type(xtrapath) == type(''):
            epath.insert(0, xtrapath)
        else:
            epath = xtrapath + epath
    for p in epath:
        npth = os.path.join(p, mod)
        if os.path.exists(npth):
            return npth
    return ''
def getImports1(pth):
    """Find the binary dependencies of PTH.
    This implementation (not used right now) uses the MSVC utility dumpbin"""
    rslt = []
    # NOTE(review): tempfile.mktemp is race-prone, and the sleep is a
    # crude wait for dumpbin's output file to appear.
    tmpf = tempfile.mktemp()
    os.system('dumpbin /IMPORTS "%s" >%s' %(pth, tmpf))
    time.sleep(0.1)
    txt = open(tmpf,'r').readlines()
    os.remove(tmpf)
    i = 0
    while i < len(txt):
        # dependency lines contain a single token with a '.' (e.g. FOO.DLL)
        tokens = string.split(txt[i])
        if len(tokens) == 1 and string.find(tokens[0], '.') > 0:
            rslt.append(string.strip(tokens[0]))
        i = i + 1
    return rslt
def getImports2(pth):
"""Find the binary dependencies of PTH.
This implementation walks through the PE header"""
import struct
rslt = []
try:
f = open(pth, 'rb').read()
pehdrd = struct.unpack('l', f[60:64])[0]
magic = struct.unpack('l', f[pehdrd:pehdrd+4])[0]
numsecs = struct.unpack('h', f[pehdrd+6:pehdrd+8])[0]
numdirs = struct.unpack('l', f[pehdrd+116:pehdrd+120])[0]
idata = ''
if magic == 17744:
importsec, sz = struct.unpack('2l', f[pehdrd+128:pehdrd+136])
secttbl = pehdrd + 120 + 8*numdirs
secttblfmt = '8s7l2h'
seclist = []
for i in range(numsecs):
seclist.append(struct.unpack(secttblfmt, f[secttbl+i*40:secttbl+(i+1)*40]))
#nm, vsz, va, rsz, praw, preloc, plnnums, qrelocs, qlnnums, flags \
# = seclist[-1]
for i in range(len(seclist)-1):
if seclist[i][2] <= importsec < seclist[i+1][2]:
break
vbase = seclist[i][2]
raw = seclist[i][4]
idatastart = raw + importsec - vbase
idata = f[idatastart:idatastart+seclist[i][1]]
i = 0
while 1:
vsa = struct.unpack('5l', idata[i*20:i*20+20])[3]
if vsa == 0:
break
sa = raw + vsa - vbase
end = string.find(f, '\000', sa)
rslt.append(f[sa:end])
i = i + 1
except IOError:
print "bindepend cannot analyze %s - file not found!"
except struct.error:
print "bindepend cannot analyze %s - error walking thru pehdr"
return rslt
def Dependencies(lTOC):
"""Expand LTOC to include all the closure of binary dependencies.
LTOC is a logical table of contents, ie, a seq of tuples (name, path).
Return LTOC expanded by all the binary dependencies of the entries
in LTOC, except those listed in the module global EXCLUDES"""
for (nm, pth) in lTOC:
fullnm = string.upper(os.path.basename(pth))
if seen.get(string.upper(nm), 0):
continue
print "analyzing", nm
seen[string.upper(nm)] = 1
dlls = getImports(pth)
for lib in dlls:
print " found", lib
if excludes.get(string.upper(lib), 0):
continue
if seen.get(string.upper(lib), 0):
continue
npth = getfullnameof(lib)
if npth:
lTOC.append((lib, npth))
else:
print " lib not found:", lib, "dependency of",
return lTOC
##if getfullnameof('dumpbin.exe') == '':
## def getImports(pth):
## return getImports2(pth)
##else:
## def getImports(pth):
## return getImports1(pth)
def getImports(pth):
    """Forwards to either getImports1 or getImports2

    Currently hard-wired to the PE-header walker (getImports2); the
    commented-out selection above picked dumpbin when available.
    """
    return getImports2(pth)

View File

@@ -1,204 +0,0 @@
# copyright 1999 McMillan Enterprises, Inc.
# license: use as you please. No warranty.
#
# A subclass of Archive that can be understood
# by a C program. See uplaunch.cpp for unpacking
# from C.
import archive
import struct
import zlib
import strop
class CTOC:
    """A class encapsulating the table of contents of a CArchive.
    When written to disk, it is easily read from C."""
    # per-entry header; each record is the packed struct followed by a
    # NUL-terminated name
    ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
    def __init__(self):
        # list of (dpos, dlen, ulen, flag, typcd, nm) tuples
        self.data = []
    def frombinary(self, s):
        """Decode the binary string into an in memory list.
        S is a binary string."""
        entrylen = struct.calcsize(self.ENTRYSTRUCT)
        p = 0
        while p<len(s):
            (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT,
                                                                  s[p:p+entrylen])
            nmlen = slen - entrylen
            p = p + entrylen
            (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
            p = p + nmlen
            # strip the trailing NUL from the stored name
            self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
    def tobinary(self):
        """Return self as a binary string."""
        import string
        entrylen = struct.calcsize(self.ENTRYSTRUCT)
        rslt = []
        for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
            nmlen = len(nm) + 1 # add 1 for a '\0'
            rslt.append(struct.pack(self.ENTRYSTRUCT+repr(nmlen)+'s',
                                    nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+'\0'))
        return string.join(rslt, '')
    def add(self, dpos, dlen, ulen, flag, typcd, nm):
        """Add an entry to the table of contents.
        DPOS is data position.
        DLEN is data length.
        ULEN is the uncompressed data len.
        FLAG says if the data is compressed.
        TYPCD is the "type" of the entry (used by the C code)
        NM is the entry's name."""
        self.data.append((dpos, dlen, ulen, flag, typcd, nm))
    def get(self, ndx):
        """return the toc entry (tuple) at index NDX"""
        return self.data[ndx]
    def __getitem__(self, ndx):
        return self.data[ndx]
    def find(self, name):
        """Return the index of the toc entry with name NAME.
        Return -1 for failure."""
        for i in range(len(self.data)):
            if self.data[i][-1] == name:
                return i
        return -1
class CArchive(archive.Archive):
"""An Archive subclass that an hold arbitrary data.
Easily handled from C or from Python."""
MAGIC = 'MEI\014\013\012\013\015'
HDRLEN = 0
TOCTMPLT = CTOC
TRLSTRUCT = '8siii'
TRLLEN = 20
LEVEL = 9
def __init__(self, path=None, start=0, len=0):
"""Constructor.
PATH is path name of file (create an empty CArchive if path is None).
START is the seekposition within PATH.
LEN is the length of the CArchive (if 0, then read till EOF). """
self.len = len
archive.Archive.__init__(self, path, start)
def checkmagic(self):
"""Verify that self is a valid CArchive.
Magic signature is at end of the archive."""
#magic is at EOF; if we're embedded, we need to figure where that is
if self.len:
self.lib.seek(self.start+self.len, 0)
else:
self.lib.seek(0, 2)
filelen = self.lib.tell()
if self.len:
self.lib.seek(self.start+self.len-self.TRLLEN, 0)
else:
self.lib.seek(-self.TRLLEN, 2)
(magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT,
self.lib.read(self.TRLLEN))
if magic != self.MAGIC:
raise RuntimeError, "%s is not a valid %s archive file" \
% (self.path, self.__class__.__name__)
self.pkgstart = filelen - totallen
if self.len:
if totallen != self.len or self.pkgstart != self.start:
raise RuntimeError, "Problem with embedded archive in %s" % self.path
self.tocpos, self.toclen = tocpos, toclen
def loadtoc(self):
"""Load the table of contents into memory."""
self.toc = self.TOCTMPLT()
self.lib.seek(self.pkgstart+self.tocpos)
tocstr = self.lib.read(self.toclen)
self.toc.frombinary(tocstr)
def extract(self, name):
"""Get the contents of an entry.
NAME is an entry name.
Return the tuple (ispkg, contents).
For non-Python resoures, ispkg is meaningless (and 0).
Used by the import mechanism."""
if type(name) == type(''):
ndx = self.toc.find(name)
if ndx == -1:
return None
else:
ndx = name
(dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
self.lib.seek(self.pkgstart+dpos)
rslt = self.lib.read(dlen)
if flag == 1:
rslt = zlib.decompress(rslt)
if typcd == 'M':
return (1, rslt)
return (0, rslt)
def contents(self):
"""Return the names of the entries"""
rslt = []
for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
rslt.append(nm)
return rslt
def add(self, entry):
"""Add an ENTRY to the CArchive.
ENTRY must have:
entry[0] is name (under which it will be saved).
entry[1] is fullpathname of the file.
entry[2] is a flag for it's storage format (0==uncompressed,
1==compressed, 2==Python source format)
entry[3] is the entry's type code."""
(nm, pathnm, flag, typcd) = entry[:4]
if flag == 2:
s = open(pathnm, 'r').read()
s = s + '\n\0'
else:
s = open(pathnm, 'rb').read()
ulen = len(s)
if flag == 1:
s = zlib.compress(s, self.LEVEL)
dlen = len(s)
where = self.lib.tell()
if typcd == 'm':
if strop.find(pathnm, '.__init__.py') > -1:
typcd = 'M'
self.toc.add(where, dlen, ulen, flag, typcd, nm)
self.lib.write(s)
def save_toc(self, tocpos):
"""Save the table of contents to disk."""
self.tocpos = tocpos
tocstr = self.toc.tobinary()
self.toclen = len(tocstr)
self.lib.write(tocstr)
def save_trailer(self, tocpos):
"""Save the trailer to disk.
CArchives can be opened from the end - the trailer points
back to the start. """
totallen = tocpos + self.toclen + self.TRLLEN
trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen,
tocpos, self.toclen)
self.lib.write(trl)
def openEmbedded(self, name):
"""Open a CArchive of name NAME embedded within this CArchive."""
ndx = self.toc.find(name)
if ndx == -1:
raise KeyError, "Member '%s' not found in %s" % (name, self.path)
(dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
if flag:
raise ValueError, "Cannot open compressed archive %s in place"
return CArchive(self.path, self.pkgstart+dpos, dlen)

View File

@@ -1,157 +0,0 @@
# copyright 1999 McMillan Enterprises, Inc.
# license: use as you please. No warranty.
#
# A subclass of Archive that can be understood
# by a C program. See uplaunch.cpp for unpacking
# from C.
#carchive_rt is a stripped down version of MEInc.Dist.carchive.
#It has had all building logic removed.
#It's purpose is to bootstrap the Python installation.
import archive_rt
import struct
import zlib
import strop
class CTOC:
    """Runtime-only table of contents of a CArchive.

    Read-side twin of carchive.CTOC: it can decode a packed TOC but the
    build-side methods (tobinary/add) have been stripped, since this
    variant only bootstraps an existing archive."""
    # Fixed-size record header (structlen, dpos, dlen, ulen, flag, typcd),
    # immediately followed by the NUL-terminated entry name.
    ENTRYSTRUCT = 'iiiibc' #(structlen, dpos, dlen, ulen, flag, typcd) followed by name
    def __init__(self):
        # Each entry is a tuple (dpos, dlen, ulen, flag, typcd, nm).
        self.data = []
    def frombinary(self, s):
        """Decode the packed binary TOC string S into self.data."""
        entrylen = struct.calcsize(self.ENTRYSTRUCT)
        p = 0
        while p<len(s):
            (slen, dpos, dlen, ulen, flag, typcd) = struct.unpack(self.ENTRYSTRUCT,
                                                        s[p:p+entrylen])
            # slen is the whole record's length; the remainder is the name.
            nmlen = slen - entrylen
            p = p + entrylen
            (nm,) = struct.unpack(repr(nmlen)+'s', s[p:p+nmlen])
            p = p + nmlen
            # nm[:-1] strips the trailing NUL terminator.
            self.data.append((dpos, dlen, ulen, flag, typcd, nm[:-1]))
    # (The commented-out build-side tobinary()/add() that lived here are
    # available in the full carchive.CTOC.)
    def get(self, ndx):
        """Return the toc entry (tuple) at index NDX."""
        return self.data[ndx]
    def __getitem__(self, ndx):
        return self.data[ndx]
    def find(self, name):
        """Return the index of the entry named NAME, or -1 if absent."""
        for i in range(len(self.data)):
            if self.data[i][-1] == name:
                return i
        return -1
class CArchive(archive_rt.Archive):
MAGIC = 'MEI\014\013\012\013\015'
HDRLEN = 0
TOCTMPLT = CTOC
TRLSTRUCT = '8siii'
TRLLEN = 20
LEVEL = 9
def __init__(self, path=None, start=0, len=0):
self.len = len
archive_rt.Archive.__init__(self, path, start)
def checkmagic(self):
#magic is at EOF; if we're embedded, we need to figure where that is
if self.len:
self.lib.seek(self.start+self.len, 0)
else:
self.lib.seek(0, 2)
filelen = self.lib.tell()
if self.len:
self.lib.seek(self.start+self.len-self.TRLLEN, 0)
else:
self.lib.seek(-self.TRLLEN, 2)
(magic, totallen, tocpos, toclen) = struct.unpack(self.TRLSTRUCT,
self.lib.read(self.TRLLEN))
if magic != self.MAGIC:
raise RuntimeError, "%s is not a valid %s archive file" \
% (self.path, self.__class__.__name__)
self.pkgstart = filelen - totallen
if self.len:
if totallen != self.len or self.pkgstart != self.start:
raise RuntimeError, "Problem with embedded archive in %s" % self.path
self.tocpos, self.toclen = tocpos, toclen
def loadtoc(self):
self.toc = self.TOCTMPLT()
self.lib.seek(self.pkgstart+self.tocpos)
tocstr = self.lib.read(self.toclen)
self.toc.frombinary(tocstr)
def extract(self, name):
if type(name) == type(''):
ndx = self.toc.find(name)
if ndx == -1:
return None
else:
ndx = name
(dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
self.lib.seek(self.pkgstart+dpos)
rslt = self.lib.read(dlen)
if flag == 1:
rslt = zlib.decompress(rslt)
if typcd == 'M':
return (1, rslt)
return (0, rslt)
def contents(self):
rslt = []
for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
rslt.append(nm)
return rslt
## def add(self, entry):
## (nm, pathnm, flag, typcd) = entry[:4]
## if flag == 2:
## s = open(pathnm, 'r').read()
## s = s + '\0'
## else:
## s = open(pathnm, 'rb').read()
## ulen = len(s)
## if flag == 1:
## s = zlib.compress(s, self.LEVEL)
## dlen = len(s)
## where = self.lib.tell()
## if typcd == 'm':
## if strop.find(pathnm, '.__init__.py') > -1:
## typcd = 'M'
## self.toc.add(where, dlen, ulen, flag, typcd, nm)
## self.lib.write(s)
##
## def save_toc(self, tocpos):
## self.tocpos = tocpos
## tocstr = self.toc.tobinary()
## self.toclen = len(tocstr)
## self.lib.write(tocstr)
##
## def save_trailer(self, tocpos):
## totallen = tocpos + self.toclen + self.TRLLEN
## trl = struct.pack(self.TRLSTRUCT, self.MAGIC, totallen,
## tocpos, self.toclen)
## self.lib.write(trl)
def openEmbedded(self, name):
ndx = self.toc.find(name)
if ndx == -1:
raise KeyError, "Member '%s' not found in %s" % (name, self.path)
(dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
if flag:
raise ValueError, "Cannot open compressed archive %s in place"
return CArchive(self.path, self.pkgstart+dpos, dlen)

View File

@@ -1,178 +0,0 @@
# copyright McMillan Enterprises, 1999
import os, sys
import string
# Resource type codes returned by idtype()/identify().
SCRIPT = 1      # a .py file (with no compiled sibling; see identify())
GSCRIPT = 2     # a .pyw (GUI) script
MODULE = 3      # a .pyc/.pyo (or a .py whose compiled sibling exists)
PACKAGE = 4     # a directory containing __init__.py[co]
PBINARY = 5     # a Python binary extension (.pyd)
BINARY = 6      # a plain .dll
ZLIB = 7        # a .pyz archive
DIRECTORY = 8   # a plain directory (not a package)
DATA = 9        # anything else
# Module-level search state, initialized once at import time (see below).
_bpath = None   # the Windows dll search path
_ppath = None   # sys.path expanded with all package directories
_pcache = {}    # cache of expanded xtrapath lists, keyed by id(xtrapath)
def _locate(nm, xtrapath=None, base=None):
"""Find a file / directory named NM in likely places.
XTRAPATH is a list of paths to prepend to BASE.
If BASE is None, sys.path (as extended by packages) is used."""
ppath = base
if base is None:
ppath = _ppath
if xtrapath:
ppath = xtrapath + ppath
for pth in ppath:
fullnm = os.path.join(pth, nm)
#print " _locate trying", fullnm
if os.path.exists(fullnm):
break
else:
return ''
return fullnm
def _locatepython(name, xtrapath=None):
    """Locate a Python resource named NAME.

    All of the standard file extensions will be tried. XTRAPATH is
    prepended to sys.path. Returns '' if nothing is found."""
    for ext in ('.py', '.pyc', '.pyw', '.pyo', '.pyd', '.dll'):
        found = _locate(name + ext, xtrapath)
        if found:
            return found
    # Binary extensions may also live on the Windows dll search path.
    for ext in ('.pyd', '.dll'):
        found = _locate(name + ext, [], _bpath)
        if found:
            return found
    return ''
def ispackage(name):
    """Determine if NAME is the name of a package.

    Returns 1 when the directory holds an __init__ module in source or
    compiled form, 0 otherwise."""
    for initnm in ('__init__.py', '__init__.pyc', '__init__.pyo'):
        if os.path.exists(os.path.join(name, initnm)):
            return 1
    return 0
def idtype(fullnm):
    """Figure out what type of resource FULLNM refers to.

    Returns one of the module-level type codes (PACKAGE, DIRECTORY,
    PBINARY, BINARY, MODULE, SCRIPT, GSCRIPT, ZLIB or DATA)."""
    if os.path.isdir(fullnm):
        if ispackage(fullnm):
            return PACKAGE
        return DIRECTORY
    ext = os.path.splitext(fullnm)[1]
    bytype = {'.pyd': PBINARY, '.dll': BINARY,
              '.pyc': MODULE, '.pyo': MODULE,
              '.py': SCRIPT, '.pyw': GSCRIPT, '.pyz': ZLIB}
    if ext in bytype:
        return bytype[ext]
    return DATA
def identify(name, xtrapath=None):
    """Find, and identify the type of NAME, using XTRAPATH as the
    first place to look.
    Return type, name and full path name.
    NAME can be a logical or physical name. However, the logical
    name of a Python module can easily conflict with the physical
    name of something else, so beware."""
    if os.path.exists(name):
        fullnm = name
    else:
        # Expand xtrapath with package dirs (cached by the list's id()).
        if xtrapath is None:
            xtra = []
        elif id(xtrapath) in _pcache:
            xtra = _pcache[id(xtrapath)]
        else:
            xtra = expand(xtrapath)
            _pcache[id(xtrapath)] = xtra
        fullnm = _locate(name, xtra)
        if not fullnm:
            # fall back to the Windows dll search path
            fullnm = _locate(name, [], _bpath)
        if not fullnm:
            ext = os.path.splitext(name)[1]
            if not ext:
                # extensionless: try as a Python logical name
                fullnm = _locatepython(name, xtra)
                if not fullnm:
                    raise ValueError, "%s not found" % name
            else:
                # dotted logical name: replace dots with slashes one at a
                # time, retrying, so "a.b.c" can match "a/b.c" or "a/b/c".
                nm = name
                while string.count(nm, '.'):
                    nm = string.replace(nm, '.', '/', 1)
                    fullnm = _locatepython(nm, xtra)
                    if fullnm:
                        break
                else:
                    raise ValueError, "%s not found" % name
    typ = idtype(fullnm)
    nm = name
    if typ in (GSCRIPT, SCRIPT, MODULE, PACKAGE, PBINARY):
        dir, nm = os.path.split(fullnm)
        nm = os.path.splitext(nm)[0]
        if typ == SCRIPT:
            # a .py with a compiled sibling is reported as MODULE
            if os.path.exists(fullnm+'c') or os.path.exists(fullnm+'o'):
                typ = MODULE
        if typ in (MODULE, PACKAGE):
            # walk up through enclosing packages to build the dotted name
            while idtype(dir) == PACKAGE:
                dir, lnode = os.path.split(dir)
                nm = lnode+'.'+nm
    elif typ == BINARY:
        nm = os.path.basename(fullnm)
    return typ, nm, fullnm
def expand(plist):
    """ expand a list of paths (like sys.path) to include all the
    directories that qualify as packages """
    pkgdirs = []
    for pth in plist:
        # os.path.walk (Python 2 only) calls pkgfinder on every directory;
        # pkgfinder appends package dirs to pkgdirs and prunes the descent.
        os.path.walk(pth, pkgfinder, pkgdirs)
    return plist + pkgdirs
def pkgfinder(pkgdirs, dir, fnms):
    """os.path.walk visitor: collect package directories into PKGDIRS.
    Non-package subdirectories are deleted from FNMS *in place*, which
    stops os.path.walk from descending into them; package directories
    are kept so nested packages are found too."""
    i = 0
    while i < len(fnms):
        fnm = os.path.join(dir, fnms[i])
        if os.path.isdir(fnm):
            if ispackage(fnm):
                pkgdirs.append(fnm)
                i = i + 1
            else:
                del fnms[i]
        else:
            i = i + 1
# One-time, import-time initialization of the module search paths.
if _bpath is None:
    try:
        import win32api
    except ImportError:
        print "Cannot determine your Windows or System directories"
        print "Please add them to your PATH if .dlls are not found"
        _bpath = []
    else:
        # Approximate the Windows dll search order: system dir(s), then
        # the Windows dir...
        sysdir = win32api.GetSystemDirectory()
        sysdir2 = os.path.join(sysdir, '../SYSTEM')
        windir = win32api.GetWindowsDirectory()
        _bpath = [sysdir, sysdir2, windir]
    # ...then every directory on PATH (also used as the fallback when
    # win32api is unavailable).
    _bpath.extend(string.split(os.environ.get('PATH', ''), ';'))
if _ppath is None:
    _ppath = expand(sys.path)
def getpath():
    """Return the list of directories Windows will search for dlls."""
    return _bpath

View File

@@ -1,138 +0,0 @@
# This code is courtesy of Thomas Heller, who
# has kindly donated it to this project.
# Win32 resource-type ids (RT_ICON / RT_GROUP_ICON from winuser.h).
RT_ICON = 3
RT_GROUP_ICON = 14
# LoadLibraryEx flag: map the file as data only, don't execute it.
LOAD_LIBRARY_AS_DATAFILE = 2
import struct
class Structure:
    """Base class mapping a struct format string onto named fields.

    Subclasses define _format_ (a struct format string) and _names_
    (one field name per format item); instances then expose the fields
    as attributes and can round-trip the packed binary form."""
    def __init__ (self):
        size = self._sizeInBytes = struct.calcsize (self._format_)
        # Start with every field zeroed (unpack a buffer of NUL bytes).
        self._fields_ = list (struct.unpack (self._format_, '\000' * size))
        indexes = self._indexes_ = {}
        for i in range (len (self._names_)):
            indexes[self._names_[i]] = i
    def dump (self):
        """Print every public field name/value pair (debug aid)."""
        print "DUMP of", self
        for name in self._names_:
            if name[0] != '_':
                print "%20s = %s" % (name, getattr (self, name))
        print
    def __getattr__ (self, name):
        # Field names resolve into the packed-field list; anything else
        # falls back to the instance dict.
        if name in self._names_:
            index = self._indexes_[name]
            return self._fields_[index]
        try:
            return self.__dict__[name]
        except KeyError:
            raise AttributeError, name
    def __setattr__ (self, name, value):
        # Assignments to field names update the packed-field list;
        # everything else goes into the instance dict as usual.
        if name in self._names_:
            index = self._indexes_[name]
            self._fields_[index] = value
        else:
            self.__dict__[name] = value
    def tostring (self):
        """Return the packed binary representation of the fields."""
        return apply (struct.pack, [self._format_,] + self._fields_)
    def fromfile (self, file):
        """Fill the fields by reading the packed form from FILE."""
        data = file.read (self._sizeInBytes)
        self._fields_ = list (struct.unpack (self._format_, data))
class ICONDIRHEADER (Structure):
    # Header of an .ico file: reserved word, resource type, image count.
    _names_ = "idReserved", "idType", "idCount"
    _format_ = "hhh"
class ICONDIRENTRY (Structure):
    # One .ico directory entry; dwImageOffset points at the image data
    # within the file.
    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "dwImageOffset"
    _format_ = "bbbbhhii"
class GRPICONDIR (Structure):
    # RT_GROUP_ICON resource header; same layout as ICONDIRHEADER.
    _names_ = "idReserved", "idType", "idCount"
    _format_ = "hhh"
class GRPICONDIRENTRY (Structure):
    # Like ICONDIRENTRY, but the 32-bit file offset is replaced by a
    # 16-bit icon resource id (nID) — note the final 'h' vs 'i'.
    _names_ = "bWidth", "bHeight", "bColorCount", "bReserved", "wPlanes", "wBitCount", "dwBytesInRes", "nID"
    _format_ = "bbbbhhih"
class IconFile:
    """Parse a .ico file into its directory entries and image blobs,
    and re-emit them in RT_GROUP_ICON resource form."""
    def __init__ (self, path):
        self.path = path
        file = open (path, "rb")
        self.entries = []
        self.images = []
        header = self.header = ICONDIRHEADER()
        header.fromfile (file)
        for i in range (header.idCount):
            entry = ICONDIRENTRY()
            entry.fromfile (file)
            self.entries.append (entry)
        # Each directory entry points at its raw image data in the file.
        for e in self.entries:
            file.seek (e.dwImageOffset, 0)
            self.images.append (file.read (e.dwBytesInRes))
    def grp_icon_dir (self):
        """Return the group-icon header (same layout as the .ico header)."""
        return self.header.tostring()
    def grp_icondir_entries (self):
        """Return the packed GRPICONDIRENTRY records, substituting
        sequential 1-based resource ids for the file offsets."""
        data = ""
        i = 1
        for entry in self.entries:
            e = GRPICONDIRENTRY()
            # copy every field except the last (dwImageOffset -> nID)
            for n in e._names_[:-1]:
                setattr(e, n, getattr (entry, n))
            e.nID = i
            i = i + 1
            data = data + e.tostring()
        return data
def CopyIcons_FromIco (dstpath, srcpath):
    """Replace DSTPATH's icon resources with those from the .ico SRCPATH.
    Writes one RT_GROUP_ICON resource plus one RT_ICON per image."""
    f = IconFile (srcpath)
    print "Updating icons from", srcpath, "to", dstpath
    import win32api #, win32con
    hdst = win32api.BeginUpdateResource (dstpath, 0)
    data = f.grp_icon_dir()
    data = data + f.grp_icondir_entries()
    win32api.UpdateResource (hdst, RT_GROUP_ICON, 1, data)
    print "Writing RT_GROUP_ICON resource with %d bytes" % len (data)
    # icon ids here must match the 1-based ids in grp_icondir_entries()
    i = 1
    for data in f.images:
        win32api.UpdateResource (hdst, RT_ICON, i, data)
        print "Writing RT_ICON resource with %d bytes" % len (data)
        i = i + 1
    win32api.EndUpdateResource (hdst, 0)
def CopyIcons (dstpath, srcpath):
    """Copy icon resources into DSTPATH from SRCPATH.

    SRCPATH may be a .ico file, an .exe/.dll, or "path,N" to select
    group-icon N from the source binary (a negative N is used as a
    literal resource id)."""
    import os.path, string
    index = None
    try:
        # split off an optional ",index" suffix
        srcpath, index = map (string.strip, string.split (srcpath, ','))
        index = int (index)
    except:
        # no usable ",index" suffix - treat the whole string as a path
        pass
    print "PATH, INDEX", srcpath, index
    srcext = os.path.splitext (srcpath)[1]
    if string.lower (srcext) == '.ico':
        return CopyIcons_FromIco (dstpath, srcpath)
    if index is not None:
        print "Updating icons from", srcpath, ", %d to" % index, dstpath
    else:
        print "Updating icons from", srcpath, "to", dstpath
    import win32api #, win32con
    hdst = win32api.BeginUpdateResource (dstpath, 0)
    hsrc = win32api.LoadLibraryEx (srcpath, 0, LOAD_LIBRARY_AS_DATAFILE)
    if index is None:
        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[0]
    elif index >= 0:
        grpname = win32api.EnumResourceNames (hsrc, RT_GROUP_ICON)[index]
    else:
        # negative index: use -index directly as the resource id
        grpname = -index
    data = win32api.LoadResource (hsrc, RT_GROUP_ICON, grpname)
    win32api.UpdateResource (hdst, RT_GROUP_ICON, grpname, data)
    # copy every individual icon image the source exposes
    for iconname in win32api.EnumResourceNames (hsrc, RT_ICON):
        data = win32api.LoadResource (hsrc, RT_ICON, iconname)
        win32api.UpdateResource (hdst, RT_ICON, iconname, data)
    win32api.FreeLibrary (hsrc)
    win32api.EndUpdateResource (hdst, 0)

View File

@@ -1,487 +0,0 @@
#
# imputil.py
#
# Written by Greg Stein. Public Domain.
# No Copyright, no Rights Reserved, and no Warranties.
#
# Utilities to help out with custom import mechanisms.
#
# Additional modifications were contribed by Marc-Andre Lemburg and
# Gordon McMillan.
#
__version__ = '0.3'
# note: avoid importing non-builtin modules
import imp
import sys
import strop
import __builtin__ ### why this instead of just using __builtins__ ??
# for the DirectoryImporter
import struct
import marshal
class Importer:
    "Base class for replacing standard import functions."
    def install(self):
        """Install this importer's hooks into __builtin__, chaining to
        whatever __import__/reload were installed before us."""
        self.__chain_import = __builtin__.__import__
        self.__chain_reload = __builtin__.reload
        __builtin__.__import__ = self._import_hook
        __builtin__.reload = self._reload_hook

    ######################################################################
    #
    # PRIVATE METHODS
    #
    def _import_hook(self, name, globals=None, locals=None, fromlist=None):
        """Python calls this hook to locate and import a module.
        This method attempts to load the (dotted) module name. If it cannot
        find it, then it delegates the import to the next import hook in the
        chain (where "next" is defined as the import hook that was in place
        at the time this Importer instance was installed).
        """
        # determine the context of this import
        parent = self._determine_import_context(globals)
        # import the module within the context, or from the default context
        top, tail = self._import_top_module(parent, name)
        if top is None:
            # the module was not found; delegate to the next import hook
            return self.__chain_import(name, globals, locals, fromlist)
        # the top module may be under the control of a different importer.
        # if so, then defer to that importer for completion of the import.
        # note it may be self, or is undefined so we (self) may as well
        # finish the import.
        importer = top.__dict__.get('__importer__', self)
        return importer._finish_import(top, tail, fromlist)
    def _finish_import(self, top, tail, fromlist):
        # if "a.b.c" was provided, then load the ".b.c" portion down from
        # below the top-level module.
        bottom = self._load_tail(top, tail)
        # if the form is "import a.b.c", then return "a"
        if not fromlist:
            # no fromlist: return the top of the import tree
            return top
        # the top module was imported by self, or it was not imported through
        # the Importer mechanism and self is simply handling the import of
        # the sub-modules and fromlist.
        #
        # this means that the bottom module was also imported by self, or we
        # are handling things in the absence of a prior Importer
        #
        # ### why the heck are we handling it? what is the example scenario
        # ### where this happens? note that we can't determine is_package()
        # ### for non-Importer modules.
        #
        # since we imported/handled the bottom module, this means that we can
        # also handle its fromlist (and reliably determine is_package()).
        # if the bottom node is a package, then (potentially) import some modules.
        #
        # note: if it is not a package, then "fromlist" refers to names in
        #       the bottom module rather than modules.
        # note: for a mix of names and modules in the fromlist, we will
        #       import all modules and insert those into the namespace of
        #       the package module. Python will pick up all fromlist names
        #       from the bottom (package) module; some will be modules that
        #       we imported and stored in the namespace, others are expected
        #       to be present already.
        if self._is_package(bottom.__dict__):
            self._import_fromlist(bottom, fromlist)
        # if the form is "from a.b import c, d" then return "b"
        return bottom
    def _reload_hook(self, module):
        "Python calls this hook to reload a module."
        # reloading of a module may or may not be possible (depending on the
        # importer), but at least we can validate that it's ours to reload
        importer = module.__dict__.get('__importer__', None)
        if importer is not self:
            return self.__chain_reload(module)
        # okay. it is ours, but we don't know what to do (yet)
        ### we should blast the module dict and do another get_code(). need to
        ### flesh this out and add proper docco...
        raise SystemError, "reload not yet implemented"
    def _determine_import_context(self, globals):
        """Returns the context in which a module should be imported.
        The context could be a loaded (package) module and the imported module
        will be looked for within that package. The context could also be None,
        meaning there is no context -- the module should be looked for as a
        "top-level" module.
        """
        if not globals or \
           globals.get('__importer__', None) is not self:
            # globals does not refer to one of our modules or packages.
            # That implies there is no relative import context, and it
            # should just pick it off the standard path.
            return None
        # The globals refer to a module or package of ours. It will define
        # the context of the new import. Get the module/package fqname.
        parent_fqname = globals['__name__']
        # for a package, return itself (imports refer to pkg contents)
        if self._is_package(globals):
            parent = sys.modules[parent_fqname]
            assert globals is parent.__dict__
            return parent
        i = strop.rfind(parent_fqname, '.')
        # a module outside of a package has no particular import context
        if i == -1:
            return None
        # for a module in a package, return the package (imports refer to siblings)
        parent_fqname = parent_fqname[:i]
        parent = sys.modules[parent_fqname]
        assert parent.__name__ == parent_fqname
        return parent
    def _import_top_module(self, parent, name):
        """Locate the top of the import tree (relative or absolute).
        parent defines the context in which the import should occur. See
        _determine_import_context() for details.
        Returns a tuple (module, tail). module is the loaded (top-level) module,
        or None if the module is not found. tail is the remaining portion of
        the dotted name.
        """
        i = strop.find(name, '.')
        if i == -1:
            head = name
            tail = ""
        else:
            head = name[:i]
            tail = name[i+1:]
        if parent:
            fqname = "%s.%s" % (parent.__name__, head)
        else:
            fqname = head
        module = self._import_one(parent, head, fqname)
        if module:
            # the module was relative, or no context existed (the module was
            # simply found on the path).
            return module, tail
        if parent:
            # we tried relative, now try an absolute import (from the path)
            module = self._import_one(None, head, head)
            if module:
                return module, tail
        # the module wasn't found
        return None, None
    def _import_one(self, parent, modname, fqname):
        "Import a single module."
        # has the module already been imported?
        try:
            return sys.modules[fqname]
        except KeyError:
            pass
        # load the module's code, or fetch the module itself
        result = self.get_code(parent, modname, fqname)
        if result is None:
            return None
        # did get_code() return an actual module? (rather than a code object)
        is_module = type(result[1]) is type(sys)
        # use the returned module, or create a new one to exec code into
        if is_module:
            module = result[1]
        else:
            module = imp.new_module(fqname)
        ### record packages a bit differently??
        module.__importer__ = self
        module.__ispkg__ = result[0]
        # if present, the third item is a set of values to insert into the module
        if len(result) > 2:
            module.__dict__.update(result[2])
        # the module is almost ready... make it visible
        sys.modules[fqname] = module
        # execute the code within the module's namespace
        if not is_module:
            exec(result[1], module.__dict__)
        # insert the module into its parent
        if parent:
            setattr(parent, modname, module)
        return module
    def _load_tail(self, m, tail):
        """Import the rest of the modules, down from the top-level module.
        Returns the last module in the dotted list of modules.
        """
        if tail:
            for part in strop.splitfields(tail, '.'):
                fqname = "%s.%s" % (m.__name__, part)
                m = self._import_one(m, part, fqname)
                if not m:
                    raise ImportError, "No module named " + fqname
        return m
    def _import_fromlist(self, package, fromlist):
        'Import any sub-modules in the "from" list.'
        # if '*' is present in the fromlist, then look for the '__all__' variable
        # to find additional items (modules) to import.
        if '*' in fromlist:
            fromlist = list(fromlist) + list(package.__dict__.get('__all__', []))
        for sub in fromlist:
            # if the name is already present, then don't try to import it (it
            # might not be a module!).
            if sub != '*' and not hasattr(package, sub):
                subname = "%s.%s" % (package.__name__, sub)
                submod = self._import_one(package, sub, subname)
                if not submod:
                    raise ImportError, "cannot import name " + subname
    def _is_package(self, module_dict):
        """Determine if a given module (dictionary) specifies a package.
        The package status is in the module-level name __ispkg__. The module
        must also have been imported by self, so that we can reliably apply
        semantic meaning to __ispkg__.
        ### weaken the test to issubclass(Importer)?
        """
        return module_dict.get('__importer__', None) is self and \
               module_dict['__ispkg__']

    ######################################################################
    #
    # METHODS TO OVERRIDE
    #
    def get_code(self, parent, modname, fqname):
        """Find and retrieve the code for the given module.
        parent specifies a parent module to define a context for importing. It
        may be None, indicating no particular context for the search.
        modname specifies a single module (not dotted) within the parent.
        fqname specifies the fully-qualified module name. This is a (potentially)
        dotted name from the "root" of the module namespace down to the modname.
        If there is no parent, then modname==fqname.
        This method should return None, a 2-tuple, or a 3-tuple.
        * If the module was not found, then None should be returned.
        * The first item of the 2- or 3-tuple should be the integer 0 or 1,
          specifying whether the module that was found is a package or not.
        * The second item is the code object for the module (it will be
          executed within the new module's namespace). This item can also
          be a fully-loaded module object (e.g. loaded from a shared lib).
        * If present, the third item is a dictionary of name/value pairs that
          will be inserted into new module before the code object is executed.
          This provided in case the module's code expects certain values (such
          as where the module was found). When the second item is a module
          object, then these names/values will be inserted *after* the module
          has been loaded/initialized.
        """
        raise RuntimeError, "get_code not implemented"
######################################################################
#
# Simple function-based importer
#
class FuncImporter(Importer):
    """Importer subclass to use a supplied function rather than method
    overrides: get_code() delegates straight to func(parent, modname,
    fqname), which must follow the Importer.get_code contract."""
    def __init__(self, func):
        self.func = func
    def get_code(self, parent, modname, fqname):
        return self.func(parent, modname, fqname)
def install_with(func):
    """Install a FuncImporter that delegates get_code() to FUNC."""
    importer = FuncImporter(func)
    importer.install()
######################################################################
#
# Base class for archive-based importing
#
class PackageArchiveImporter(Importer):
    """Importer subclass to import from (file) archives.

    The archive itself imports as a package whose __archive__ attribute
    (the value returned by get_archive) is then used to resolve every
    sub-import via get_subfile."""
    def get_code(self, parent, modname, fqname):
        if parent:
            # if a parent "package" is provided, then we are importing a sub-file
            # from the archive.
            result = self.get_subfile(parent.__archive__, modname)
            if result is None:
                return None
            if type(result) == type(()):
                return (0,) + result
            return 0, result
        # no parent was provided, so the archive should exist somewhere on the
        # default "path".
        archive = self.get_archive(modname)
        if archive is None:
            return None
        # the archive imports as an (empty) package carrying __archive__
        return 1, "", {'__archive__':archive}
    def get_archive(self, modname):
        """Get an archive of modules.
        This method should locate an archive and return a value which can be
        used by get_subfile to load modules from it. The value may be a simple
        pathname, an open file, or a complex object that caches information
        for future imports.
        Return None if the archive was not found.
        """
        raise RuntimeError, "get_archive not implemented"
    def get_subfile(self, archive, modname):
        """Get code from a subfile in the specified archive.
        Given the specified archive (as returned by get_archive()), locate
        and return a code object for the specified module name.
        A 2-tuple may be returned, consisting of a code object and a dict
        of name/values to place into the target module.
        Return None if the subfile was not found.
        """
        raise RuntimeError, "get_subfile not implemented"
class PackageArchive(PackageArchiveImporter):
    "PackageArchiveImporter subclass that refers to a specific archive."
    def __init__(self, modname, archive_pathname):
        # Name-mangled attributes: this importer serves exactly one
        # archive under exactly one module name.
        self.__modname = modname
        self.__path = archive_pathname
    def get_archive(self, modname):
        """Return the archive pathname if MODNAME is ours, else None."""
        if modname == self.__modname:
            return self.__path
        return None
    # get_subfile is passed the full pathname of the archive
# get_subfile is passed the full pathname of the archive
######################################################################
#
# Emulate the standard directory-based import mechanism
#
class DirectoryImporter(Importer):
    "Importer subclass to emulate the standard importer."
    def __init__(self, dir):
        self.dir = dir
        # the cached-bytecode extension matches the interpreter mode:
        # .pyc normally (__debug__ true), .pyo under -O.
        self.ext_char = __debug__ and 'c' or 'o'
        self.ext = '.py' + self.ext_char
    def get_code(self, parent, modname, fqname):
        """Return (ispkg, code, values) for MODNAME, or None if absent."""
        if parent:
            dir = parent.__pkgdir__
        else:
            dir = self.dir
        # pull the os module from our instance data. we don't do this at the
        # top-level, because it isn't a builtin module (and we want to defer
        # loading non-builtins until as late as possible).
        try:
            os = self.os
        except AttributeError:
            import os
            self.os = os
        pathname = os.path.join(dir, modname)
        if os.path.isdir(pathname):
            values = { '__pkgdir__': pathname }
            ispkg = 1
            pathname = os.path.join(pathname, '__init__')
        else:
            values = { }
            ispkg = 0
        t_py = self._timestamp(pathname + '.py')
        t_pyc = self._timestamp(pathname + self.ext)
        if t_py is None and t_pyc is None:
            return None
        code = None
        if t_py is None or (t_pyc is not None and t_pyc >= t_py):
            # the cached bytecode is usable only if its magic number
            # matches this interpreter and its embedded timestamp matches
            # the source file's mtime.
            f = open(pathname + self.ext, 'rb')
            if f.read(4) == imp.get_magic():
                t = struct.unpack('<I', f.read(4))[0]
                if t == t_py:
                    code = marshal.load(f)
            f.close()
        if code is None:
            code = self._compile(pathname + '.py', t_py)
        return ispkg, code, values
    def _timestamp(self, pathname):
        """Return PATHNAME's mtime (stat field 8) as a long, or None if
        it does not exist."""
        try:
            s = self.os.stat(pathname)
        except OSError:
            return None
        return long(s[8])
    def _compile(self, pathname, timestamp):
        """Compile PATHNAME and best-effort write the bytecode cache."""
        codestring = open(pathname, 'r').read()
        if codestring and codestring[-1] != '\n':
            codestring = codestring + '\n'
        code = __builtin__.compile(codestring, pathname, 'exec')
        # try to cache the compiled code
        try:
            f = open(pathname + self.ext_char, 'wb')
            # write a zeroed magic first; the real magic is written last so
            # a partially-written cache file is never considered valid.
            f.write('\0\0\0\0')
            f.write(struct.pack('<I', timestamp))
            marshal.dump(code, f)
            f.flush()
            f.seek(0, 0)
            f.write(imp.get_magic())
            f.close()
        except OSError:
            pass
        return code
    def __repr__(self):
        return '<%s.%s for "%s" at 0x%x>' % (self.__class__.__module__,
                                             self.__class__.__name__,
                                             self.dir,
                                             id(self))
def _test_dir():
    "Debug/test function to create DirectoryImporters from sys.path."
    for d in sys.path[::-1]:
        DirectoryImporter(d).install()
######################################################################

View File

@@ -1,91 +0,0 @@
# copyright 1999 McMillan Enterprises, Inc.
# demo code - use as you please.
import os
import stat
def copyFile(srcFiles, destFile, append=0):
    '''
    Copy one or more files to another file. If srcFiles is a list, then all
    will be concatenated together to destFile. The append flag is also valid
    for single file copies.
    destFile will have the mode, ownership and timestamp of the last file
    copied/appended.
    '''
    if type(srcFiles) == type([]):
        # List form: recurse, honouring `append` for the first file only,
        # then forcing append mode so the rest concatenate in order.
        copyFile(srcFiles[0], destFile, append)
        for file in srcFiles[1:]:
            copyFile(file, destFile, 1)
        return
    mode = 'wb'
    if append:
        mode = 'ab'
    print " ", srcFiles, "->",
    input = open(srcFiles, 'rb')
    # NOTE(review): open() never returns a false value (it raises on
    # failure), so this guard is vestigial.
    if input:
        print destFile
        output = open(destFile, mode)
        # copy in 8 KB chunks
        while 1:
            bytesRead = input.read(8192)
            if bytesRead:
                output.write(bytesRead)
            else:
                break
        input.close()
        output.close()
    # propagate mode, timestamps and ownership from the source file
    stats = os.stat(srcFiles)
    os.chmod(destFile, stats[stat.ST_MODE])
    try: # FAT16 file systems have only one file time
        os.utime(destFile, (stats[stat.ST_ATIME], stats[stat.ST_MTIME]))
    except:
        pass
    try:
        # chown is best effort: it fails for non-root users on most systems
        os.chown(destFile, stats[stat.ST_UID], stats[stat.ST_GID])
    except:
        pass
def ensure(dirct):
    """Create directory *dirct* and any missing parents.

    Returns 1 on success (including when the directory already exists),
    0 if it could not be created.
    """
    dirnm = dirct
    plist = []
    try:
        # Walk upward until an existing ancestor is found, remembering the
        # missing components in order.
        while not os.path.exists(dirnm):
            dirnm, base = os.path.split(dirnm)
            if base == '':
                break
            plist.insert(0, base)
        # Recreate the missing components top-down.
        for d in plist:
            dirnm = os.path.join(dirnm, d)
            os.mkdir(dirnm)
    except (IOError, OSError):
        # BUG FIX: was a bare `except:`, which also swallowed programming
        # errors (e.g. TypeError for a non-string argument) and
        # KeyboardInterrupt.  Only filesystem failures mean "cannot create".
        return 0
    return 1
def getinstalldir(prompt="Enter an installation directory: "):
while 1:
installdir = raw_input("Enter an installation directory: ")
installdir = os.path.normpath(installdir)
if ensure(installdir):
break
else:
print installdir, "is not a valid pathname"
r = raw_input("Try again (y/n)?: ")
if r in 'nN':
sys.exit(0)
return installdir
def installCArchive(nm, basedir, suffixdir):
    """Unpack the CArchive file *nm* into basedir/suffixdir, then delete it."""
    import carchive_rt
    destroot = os.path.join(basedir, suffixdir)
    if ensure(destroot):
        pkg = carchive_rt.CArchive(nm)
        for member in pkg.contents():
            payload = pkg.extract(member)[1]
            outpath = os.path.join(destroot, member)
            if ensure(os.path.dirname(outpath)):
                open(outpath, 'wb').write(payload)
        # drop the archive handle before removing the file
        pkg = None
        os.remove(nm)

View File

@@ -1,85 +0,0 @@
import os, sys, UserList
import finder, tocfilter, resource
class lTOC(UserList.UserList):
""" A class for managing lists of resources.
Should be a UserList subclass. Doh.
Like a list, but has merge(other) and filter() methods """
def __init__(self, reslist=None, filters=None):
UserList.UserList.__init__(self, reslist)
self.filters = []
if filters is not None:
self.filters = filters[:]
def prepend(self, res):
self.resources.insert(0, res)
def merge(self, other):
' merge in another ltoc, discarding dups and preserving order '
tmp = {}
for res in self.data:
tmp[res.name] = 0
for res in other:
if tmp.get(res.name, 1):
self.data.append(res)
tmp[res.name] = 0
def filter(self):
' invoke all filters '
for i in range(len(self.data)):
res = self.data[i]
if res:
for f in self.filters:
if f.matches(res):
self.data[i] = None
break
self.data = filter(None, self.data)
return self
def unique(self):
' remove all duplicate entries, preserving order '
new = self.__class__()
new.merge(self)
self.data = new.data
def toList(self):
' return self as a list of (name, path, typ) '
tmp = []
for res in self.data:
tmp.append((res.name, res.path, res.typ))
return tmp
def addFilter(self, filter):
if type(filter) == type(''):
self.filters.append(finder.makeresource(filter).asFilter())
else:
if type(filter) == type(self):
if isinstance(filter, tocfilter._Filter):
self.filters.append(filter)
elif isinstance(filter, resource.resource):
self.filters.append(filter.asFilter())
else:
raise ValueError, "can't make filter from %s", repr(filter)
else:
raise ValueError, "can't make filter from %s", repr(filter)
print " added filter", repr(self.filters[-1])
if __name__ == '__main__':
    # Manual smoke test: analyze finder.py's own binary dependencies and
    # demonstrate filtering them against a system directory.
    sys.path.insert(0, '.')
    import finder
    import pprint
    s = finder.scriptresource('finder.py', './finder.py')
    ## pyltoc = lTOC(s.modules)
    ## l1 = pyltoc.toList()
    ## print "Raw py ltoc:", pprint.pprint(l1)
    ## f1 = ModFilter(['dospath', 'macpath', 'posixpath'])
    ## l2 = lTOC(s.modules).filter(f1).toList()
    ## print "Filter out dospath, macpath, posixpath:", pprint.pprint(l2)
    ## f2 = DirFilter(['.'])
    ## l3 = lTOC(s.modules).filter(f2).toList()
    ## print "Filter out current dir:", pprint.pprint(l3)
    ## f3 = StdLibFilter()
    ## l4 = lTOC(s.modules).filter(f3).toList()
    ## print "Filter out stdlib:", pprint.pprint(l4)
    ## #print "Filter out current dir and stdlib:", lTOC(s.modules).filter(f2, f3).toList()
    binltoc = lTOC(s.binaries)
    print "Raw bin ltoc:", pprint.pprint(binltoc.toList())
    binltoc.addFilter('c:/winnt/system32')
    pprint.pprint(binltoc.filter().toList())

View File

@@ -1,42 +0,0 @@
#import MkWrap
import imputil
import strop
import zlib
import os
import marshal
class MkImporter:
    """Imports Python modules stored in a Metakit database view.

    The view schema is name:S (fully-qualified module name), ispkg:I,
    code:M (zlib-compressed marshalled code object).
    """
    def __init__(self, db, viewnm='pylib'):
        # db is expected to be a MkWrap database handle -- TODO confirm
        self.db = db
        self.view = db.getas(viewnm+'[name:S, ispkg:I, code:M]') # an MkWrap view object
    def setImportHooks(self):
        """Install self.get_code as an imputil import hook."""
        imputil.FuncImporter(self.get_code).install()
    def get_code(self, parent, modname, fqname):
        """imputil callback: return (ispkg, code) for fqname, or None."""
        if self.view is None:
            return None
        ndx = self.view.search(name=fqname)
        # search() returns an insertion point; verify it is an exact hit
        if ndx < len(self.view):
            row = self.view[ndx]
            if row.name == fqname:
                return (row.ispkg, marshal.loads(zlib.decompress(row.code)))
        return None
    def build(self, lTOC):
        """Insert or update one row per (name, path, ...) entry in lTOC."""
        for entry in lTOC:
            nm, fnm = entry[0], entry[1]
            # a compiled __init__ marks a package
            ispkg = os.path.splitext(os.path.basename(fnm))[0] == '__init__'
            ndx = self.view.search(name=nm)
            if ndx < len(self.view):
                row = self.view[ndx]
                if row.name != nm:
                    # presumably keeps the view sorted by inserting at the
                    # search point -- verify against MkWrap semantics
                    self.view.insert(ndx, {})
                    row = self.view[ndx]
            else:
                ndx = self.view.append({})
                row = self.view[ndx]
            row.name = nm
            row.ispkg = ispkg
            f = open(fnm, 'rb')
            # skip the 8-byte .pyc header (magic number + timestamp)
            f.seek(8)
            obj = zlib.compress(f.read(), 9)
            row.code = obj
        self.db.commit()

View File

@@ -1,436 +0,0 @@
"""Find modules used by a script, using introspection."""
import dis
import imp
import marshal
import os
import re
import string
import sys
if sys.platform=="win32":
    # On Windows, we can locate modules in the registry with
    # the help of the win32api package.
    try:
        import win32api
    except ImportError:
        print "The win32api module is not available - modules listed"
        print "in the registry will not be found."
        win32api = None
# Bytecode opcodes used by ModuleFinder.scan_code to spot import statements.
IMPORT_NAME = dis.opname.index('IMPORT_NAME')
IMPORT_FROM = dis.opname.index('IMPORT_FROM')
# Modulefinder does a good job at simulating Python's, but it can not
# handle __path__ modifications packages make at runtime. Therefore there
# is a mechanism whereby you can register extra paths in this map for a
# package, and it will be honoured.
# Note this is a mapping of package names to lists of paths.
packagePathMap = {}
# A Public interface
def AddPackagePath(packagename, path):
    """Register an extra search path for *packagename* in packagePathMap."""
    packagePathMap.setdefault(packagename, []).append(path)
class Module:
    """A record of one discovered module: name, file, package path, code."""
    def __init__(self, name, file=None, path=None):
        self.__name__ = name
        self.__file__ = file
        self.__path__ = path
        self.__code__ = None
    def __repr__(self):
        # Only the fields that are set appear in the representation.
        parts = [repr(self.__name__)]
        if self.__file__ is not None:
            parts.append(repr(self.__file__))
        if self.__path__ is not None:
            parts.append(repr(self.__path__))
        return "Module(" + ", ".join(parts) + ")"
class ModuleFinder:
    """Simulates Python's import machinery by scanning compiled bytecode
    for IMPORT_NAME/IMPORT_FROM opcodes, recording every module reachable
    from a script.  Found modules land in self.modules; unresolvable
    names in self.badmodules (mapped to the modules that imported them).
    """
    def __init__(self, path=None, debug=0, excludes = []):
        # NOTE(review): mutable default for `excludes` is shared across
        # instances that do not pass it explicitly.
        if path is None:
            path = sys.path
        self.path = path
        self.modules = {}
        self.badmodules = {}
        self.debug = debug
        self.indent = 0
        self.excludes = excludes
    def msg(self, level, str, *args):
        """Print a debug message if `level` is within the debug threshold."""
        if level <= self.debug:
            for i in range(self.indent):
                print " ",
            print str,
            for arg in args:
                print repr(arg),
            print
    def msgin(self, *args):
        # msg() plus one level of indentation (entering a nested call)
        level = args[0]
        if level <= self.debug:
            self.indent = self.indent + 1
            apply(self.msg, args)
    def msgout(self, *args):
        # msg() minus one level of indentation (leaving a nested call)
        level = args[0]
        if level <= self.debug:
            self.indent = self.indent - 1
            apply(self.msg, args)
    def run_script(self, pathname):
        """Analyze `pathname` as the program entry point ('__main__')."""
        self.msg(2, "run_script", pathname)
        fp = open(pathname)
        stuff = ("", "r", imp.PY_SOURCE)
        self.load_module('__main__', fp, pathname, stuff)
    def load_file(self, pathname):
        """Analyze a single source file under its own module name."""
        dir, name = os.path.split(pathname)
        name, ext = os.path.splitext(name)
        fp = open(pathname)
        stuff = (ext, "r", imp.PY_SOURCE)
        self.load_module(name, fp, pathname, stuff)
    def import_hook(self, name, caller=None, fromlist=None):
        """Simulate `import name` (or `from name import fromlist`) as
        executed from module `caller`."""
        self.msg(3, "import_hook", name, caller, fromlist)
        parent = self.determine_parent(caller)
        q, tail = self.find_head_package(parent, name)
        m = self.load_tail(q, tail)
        if not fromlist:
            return q
        if m.__path__:
            self.ensure_fromlist(m, fromlist)
    def determine_parent(self, caller):
        """Return the package Module a relative import would resolve
        against, or None for top-level context."""
        self.msgin(4, "determine_parent", caller)
        if not caller:
            self.msgout(4, "determine_parent -> None")
            return None
        pname = caller.__name__
        if caller.__path__:
            # the caller is itself a package
            parent = self.modules[pname]
            assert caller is parent
            self.msgout(4, "determine_parent ->", parent)
            return parent
        if '.' in pname:
            # the caller is a submodule: its package is everything before
            # the last dot
            i = string.rfind(pname, '.')
            pname = pname[:i]
            parent = self.modules[pname]
            assert parent.__name__ == pname
            self.msgout(4, "determine_parent ->", parent)
            return parent
        self.msgout(4, "determine_parent -> None")
        return None
    def find_head_package(self, parent, name):
        """Import the first dotted component of `name`; return
        (module, remaining_tail).  Falls back from relative to absolute."""
        self.msgin(4, "find_head_package", parent, name)
        if '.' in name:
            i = string.find(name, '.')
            head = name[:i]
            tail = name[i+1:]
        else:
            head = name
            tail = ""
        if parent:
            qname = "%s.%s" % (parent.__name__, head)
        else:
            qname = head
        q = self.import_module(head, qname, parent)
        if q:
            self.msgout(4, "find_head_package ->", (q, tail))
            return q, tail
        if parent:
            # relative lookup failed; retry as a top-level import
            qname = head
            parent = None
            q = self.import_module(head, qname, parent)
            if q:
                self.msgout(4, "find_head_package ->", (q, tail))
                return q, tail
        self.msgout(4, "raise ImportError: No module named", qname)
        raise ImportError, "No module named " + qname
    def load_tail(self, q, tail):
        """Import each remaining dotted component of `tail` under `q`."""
        self.msgin(4, "load_tail", q, tail)
        m = q
        while tail:
            i = string.find(tail, '.')
            if i < 0: i = len(tail)
            head, tail = tail[:i], tail[i+1:]
            mname = "%s.%s" % (m.__name__, head)
            m = self.import_module(head, mname, m)
            if not m:
                self.msgout(4, "raise ImportError: No module named", mname)
                raise ImportError, "No module named " + mname
        self.msgout(4, "load_tail ->", m)
        return m
    def ensure_fromlist(self, m, fromlist, recursive=0):
        """Import every name in a `from m import ...` list that turns out
        to be a submodule; '*' expands (non-recursively) to all submodules."""
        self.msg(4, "ensure_fromlist", m, fromlist, recursive)
        for sub in fromlist:
            if sub == "*":
                if not recursive:
                    all = self.find_all_submodules(m)
                    if all:
                        self.ensure_fromlist(m, all, 1)
            elif not hasattr(m, sub):
                subname = "%s.%s" % (m.__name__, sub)
                submod = self.import_module(sub, subname, m)
                if not submod:
                    raise ImportError, "No module named " + subname
    def find_all_submodules(self, m):
        """Return the names of all module files in package `m`'s dirs."""
        if not m.__path__:
            return
        modules = {}
        suffixes = [".py", ".pyc", ".pyo"]
        for dir in m.__path__:
            try:
                names = os.listdir(dir)
            except os.error:
                self.msg(2, "can't list directory", dir)
                continue
            for name in names:
                mod = None
                for suff in suffixes:
                    n = len(suff)
                    if name[-n:] == suff:
                        mod = name[:-n]
                        break
                if mod and mod != "__init__":
                    modules[mod] = mod
        return modules.keys()
    def import_module(self, partname, fqname, parent):
        """Import one module; return its Module, or None (recording the
        failure against `parent` in self.badmodules)."""
        self.msgin(3, "import_module", partname, fqname, parent)
        try:
            m = self.modules[fqname]
        except KeyError:
            pass
        else:
            # already processed
            self.msgout(3, "import_module ->", m)
            return m
        if fqname in self.badmodules:
            self.msgout(3, "import_module -> None")
            self.badmodules[fqname][parent.__name__] = None
            return None
        try:
            fp, pathname, stuff = self.find_module(partname,
                                                   parent and parent.__path__)
        except ImportError:
            self.msgout(3, "import_module ->", None)
            return None
        try:
            m = self.load_module(fqname, fp, pathname, stuff)
        finally:
            if fp: fp.close()
        if parent:
            setattr(parent, partname, m)
        self.msgout(3, "import_module ->", m)
        return m
    def load_module(self, fqname, fp, pathname, (suffix, mode, type)):
        """Create a Module for `fqname` from an open file plus an
        imp-style (suffix, mode, type) triple, then scan its bytecode.
        (Python 2 tuple-parameter syntax.)"""
        self.msgin(2, "load_module", fqname, fp and "fp", pathname)
        if type == imp.PKG_DIRECTORY:
            m = self.load_package(fqname, pathname)
            self.msgout(2, "load_module ->", m)
            return m
        if type == imp.PY_SOURCE:
            co = compile(fp.read()+'\n', pathname, 'exec')
        elif type == imp.PY_COMPILED:
            if fp.read(4) != imp.get_magic():
                self.msgout(2, "raise ImportError: Bad magic number", pathname)
                # NOTE(review): three-expression raise passes `pathname` as
                # the traceback argument; "... in %s" % pathname was
                # probably intended.
                raise ImportError, "Bad magic number in %s", pathname
            # skip the 4-byte timestamp
            fp.read(4)
            co = marshal.load(fp)
        else:
            # extension module etc.: nothing to scan
            co = None
        m = self.add_module(fqname)
        m.__file__ = pathname
        if co:
            m.__code__ = co
            self.scan_code(co, m)
        self.msgout(2, "load_module ->", m)
        return m
    def scan_code(self, co, m):
        """Walk the bytecode of `co` looking for import opcodes, feeding
        each back through import_hook; recurses into nested code objects."""
        code = co.co_code
        n = len(code)
        i = 0
        lastname = None
        while i < n:
            c = code[i]
            i = i+1
            op = ord(c)
            if op >= dis.HAVE_ARGUMENT:
                # 16-bit little-endian oparg (pre-wordcode bytecode)
                oparg = ord(code[i]) + ord(code[i+1])*256
                i = i+2
            if op == IMPORT_NAME:
                name = lastname = co.co_names[oparg]
                if lastname not in self.badmodules:
                    try:
                        self.import_hook(name, m)
                    except ImportError, msg:
                        self.msg(2, "ImportError:", str(msg))
                        if name not in self.badmodules:
                            self.badmodules[name] = {}
                        self.badmodules[name][m.__name__] = None
            elif op == IMPORT_FROM:
                # IMPORT_FROM always follows the IMPORT_NAME that bound
                # `lastname`
                name = co.co_names[oparg]
                assert lastname is not None
                if lastname not in self.badmodules:
                    try:
                        self.import_hook(lastname, m, [name])
                    except ImportError, msg:
                        self.msg(2, "ImportError:", str(msg))
                        fullname = lastname + "." + name
                        if fullname not in self.badmodules:
                            self.badmodules[fullname] = {}
                        self.badmodules[fullname][m.__name__] = None
            else:
                lastname = None
        for c in co.co_consts:
            if isinstance(c, type(co)):
                self.scan_code(c, m)
    def load_package(self, fqname, pathname):
        """Create a package Module and load its __init__."""
        self.msgin(2, "load_package", fqname, pathname)
        m = self.add_module(fqname)
        m.__file__ = pathname
        m.__path__ = [pathname]
        # As per comment at top of file, simulate runtime __path__ additions.
        m.__path__ = m.__path__ + packagePathMap.get(fqname, [])
        fp, buf, stuff = self.find_module("__init__", m.__path__)
        self.load_module(fqname, fp, buf, stuff)
        self.msgout(2, "load_package ->", m)
        return m
    def add_module(self, fqname):
        """Return the Module registered for `fqname`, creating it if new."""
        if fqname in self.modules:
            return self.modules[fqname]
        self.modules[fqname] = m = Module(fqname)
        return m
    def find_module(self, name, path):
        """imp.find_module wrapper honouring excludes, builtins and (on
        Windows) registry-registered modules."""
        if name in self.excludes:
            self.msgout(3, "find_module -> Excluded")
            raise ImportError, name
        if path is None:
            if name in sys.builtin_module_names:
                return (None, None, ("", "", imp.C_BUILTIN))
            # Emulate the Registered Module support on Windows.
            if sys.platform=="win32" and win32api is not None:
                HKEY_LOCAL_MACHINE = 0x80000002
                try:
                    pathname = win32api.RegQueryValue(HKEY_LOCAL_MACHINE, "Software\\Python\\PythonCore\\%s\\Modules\\%s" % (sys.winver, name))
                    fp = open(pathname, "rb")
                    # XXX - To do - remove the hard code of C_EXTENSION.
                    stuff = "", "rb", imp.C_EXTENSION
                    return fp, pathname, stuff
                except win32api.error:
                    pass
            path = self.path
        return imp.find_module(name, path)
    def report(self):
        """Print found modules ('m', or 'P' for packages), then the
        missing ones with the modules that wanted them."""
        print
        print " %-25s %s" % ("Name", "File")
        print " %-25s %s" % ("----", "----")
        # Print modules found
        keys = self.modules.keys()
        keys.sort()
        for key in keys:
            m = self.modules[key]
            if m.__path__:
                print "P",
            else:
                print "m",
            print "%-25s" % key, m.__file__ or ""
        # Print missing modules
        keys = self.badmodules.keys()
        keys.sort()
        for key in keys:
            # ... but not if they were explicitely excluded.
            if key not in self.excludes:
                mods = self.badmodules[key].keys()
                mods.sort()
                print "?", key, "from", string.join(mods, ', ')
def test():
    """Command-line driver: [-d][-m][-p path][-q][-x excl] script [args]."""
    # Parse command line
    import getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], "dmp:qx:")
    except getopt.error, msg:
        print msg
        return
    # Process options
    debug = 1
    domods = 0
    addpath = []
    exclude = []
    for o, a in opts:
        if o == '-d':
            debug = debug + 1
        if o == '-m':
            domods = 1
        if o == '-p':
            addpath = addpath + string.split(a, os.pathsep)
        if o == '-q':
            debug = 0
        if o == '-x':
            exclude.append(a)
    # Provide default arguments
    if not args:
        script = "hello.py"
    else:
        script = args[0]
    # Set the path based on sys.path and the script directory
    path = sys.path[:]
    path[0] = os.path.dirname(script)
    path = addpath + path
    if debug > 1:
        print "path:"
        for item in path:
            print " ", repr(item)
    # Create the module finder and turn its crank
    mf = ModuleFinder(path, debug, exclude)
    for arg in args[1:]:
        # an inline '-m' switches the remaining args to module names
        if arg == '-m':
            domods = 1
            continue
        if domods:
            # 'pkg.*' means "pkg plus all its submodules"
            if arg[-2:] == '.*':
                mf.import_hook(arg[:-2], None, ["*"])
            else:
                mf.import_hook(arg)
        else:
            mf.load_file(arg)
    mf.run_script(script)
    mf.report()
if __name__ == '__main__':
    # Ctrl-C during a long analysis exits quietly instead of tracebacking.
    try:
        test()
    except KeyboardInterrupt:
        print "\n[interrupt]"

View File

@@ -1,317 +0,0 @@
import os
import string
import archivebuilder
import carchive
import tocfilter
import bindepend
import finder
# fullname -> resource instance; makeresource memoizes its results here
_cache = {}
def makeresource(name, xtrapath=None):
    """Factory function that returns a resource subclass.
    NAME is the logical or physical name of a resource.
    XTRAPATH is a path or list of paths to search first.
    return one of the resource subclasses.
    Warning - logical names can conflict; archive might return a directory,
    when the module archive.py was desired."""
    typ, nm, fullname = finder.identify(name, xtrapath)
    fullname = os.path.normpath(fullname)
    if fullname in _cache:
        return _cache[fullname]
    elif typ in (finder.SCRIPT, finder.GSCRIPT):
        rsrc = scriptresource(nm, fullname)
    elif typ == finder.MODULE:
        rsrc = moduleresource(nm, fullname)
    elif typ == finder.PACKAGE:
        rsrc = pkgresource(nm, fullname)
    elif typ in (finder.PBINARY, finder.BINARY):
        rsrc = binaryresource(nm, fullname)
    elif typ == finder.ZLIB:
        rsrc = zlibresource(nm, fullname)
    elif typ == finder.DIRECTORY:
        rsrc = dirresource(nm, fullname)
    else:
        # Unrecognized type: probe whether the file parses as a CArchive;
        # anything that does not is treated as opaque data.
        try:
            carchive.CArchive(fullname)
        except:
            rsrc = dataresource(nm, fullname)
        else:
            rsrc = archiveresource(nm, fullname)
    _cache[fullname] = rsrc
    return rsrc
class resource:
    """ Base class for all resources.
        contents() returns a list of what's contained (eg files in dirs)
        dependencies() for Python resources returns a list of moduleresources
        and binaryresources """
    def __init__(self, name, path, typ):
        """NAME is the logical name, PATH the full path, TYP the one-letter
        type code.  Values are stored verbatim; no editing or sanity checks."""
        self.name = name
        self.path = path
        self.typ = typ
    def __repr__(self):
        return "(%s, %s, %s)" % (self.name, self.path, self.typ)
    def contents(self):
        """A list of resources within this resource.
        Overridable; the base implementation is just [self]."""
        return [self]
    def dependencies(self):
        """A list of resources this resource requires.
        Overridable; the base implementation has none."""
        return []
    def __cmp__(self, other):
        # Non-resources sort before resources; otherwise order by (typ, name).
        if not isinstance(other, self.__class__):
            return -1
        return cmp((self.typ, self.name), (other.typ, other.name))
    def asFilter(self):
        """Create a tocfilter based on self.  Pure virtual."""
        raise NotImplementedError
    def asSource(self):
        """Return self in source form (base: self unchanged)."""
        return self
    def asBinary(self):
        """Return self in binary form (base: self unchanged)."""
        return self
class pythonresource(resource):
    """An empty base class.
    Used to classify resources: isinstance(x, pythonresource) marks
    Python-code resources (scripts, modules, packages)."""
    pass
class scriptresource(pythonresource):
    """ A top-level python resource.
    Has (lazily computed) attributes, modules and binaries, which together
    are the scripts dependencies() """
    def __init__(self, name, fullname):
        resource.__init__(self, name, fullname, 's')
    def __getattr__(self, name):
        # Lazy attributes: the first touch of .modules or .binaries runs
        # the (expensive) dependency analysis and caches the result on
        # self, so __getattr__ is not consulted again.
        if name == 'modules':
            print "Analyzing python dependencies of", self.name, self.path
            self.modules = []
            self._binaries = []
            nodes = string.split(self.name, '.')[:-1] # MEInc.Dist.archive -> ['MEInc', 'Dist']
            for i in range(len(nodes)):
                # each enclosing package contributes its __init__ module
                nm = string.join(nodes[:i+1], '.')
                rsrc = makeresource(nm+'.__init__')
                rsrc.name = nm
                self.modules.append(rsrc)
            for (nm, path) in archivebuilder.Dependencies(self.path):
                path = os.path.normcase(os.path.abspath(path))
                if os.path.splitext(path)[1] == '.py':
                    self.modules.append(moduleresource(nm, path))
                else:
                    self._binaries.append(binaryresource(nm, path))
            return self.modules
        elif name == 'binaries':
            # force the module analysis first; it populates self._binaries
            x = self.modules
            tmp = {}
            for br in self._binaries:
                tmp[br.name] = br
                for br2 in br.dependencies():
                    tmp[br2.name] = br2
            self.binaries = tmp.values()
            return self.binaries
        else:
            raise AttributeError, "%s" % name
    def dependencies(self):
        """Return all dependencies (Python and binary) of self."""
        return self.modules + self.binaries
    def asFilter(self):
        """Return a ModFilter based on self."""
        return tocfilter.ModFilter([self.name])
    def asSource(self):
        """Return self as a dataresource (ie, a text file wrapper)."""
        r = dataresource(self.path)
        r.name = apply(os.path.join, string.split(self.name, '.')[:-1]+[r.name])
        return r
class moduleresource(scriptresource):
    """ A module resource (differs from script in that it will generally
    be worked with as a .pyc instead of in source form) """
    def __init__(self, name, fullname):
        resource.__init__(self, name, fullname, 'm')
    def asBinary(self):
        """Return self as a dataresource wrapping the compiled (.pyc) file."""
        wrapped = dataresource(self.path)
        wrapped.name = os.path.basename(wrapped.name)
        wrapped.typ = 'b'
        return wrapped
    def asSource(self):
        """Return self as a scriptresource (ie, uncompiled form)."""
        # drop the trailing 'c' of '.pyc' to recover the .py path
        return scriptresource(self.name, self.path[:-1]).asSource()
class binaryresource(resource):
    """A .dll or .pyd.
    dependencies() yields more binaryresources """
    def __init__(self, name, fullname):
        # If the logical name has no dot, rebuild the filename from the
        # logical name plus the on-disk extension (preserving the case of
        # `name` in the stored path).
        if string.find(name, '.') == -1:
            dirpart, basepart = os.path.split(fullname)
            ext = os.path.splitext(basepart)[1]
            fullname = os.path.join(dirpart, name + ext)
        resource.__init__(self, name, fullname, 'b')
        self._depends = None
    def dependencies(self):
        """Return (and lazily cache) the list of binary dependencies."""
        if self._depends is not None:
            return self._depends
        self._depends = []
        for (lib, libpath) in bindepend.Dependencies([(self.name, self.path)]):
            self._depends.append(binaryresource(lib, libpath))
        return self._depends
    def asFilter(self):
        """Create a FileFilter from self."""
        return tocfilter.FileFilter([self.name])
class dataresource(resource):
    """A subclass for arbitrary files. """
    def __init__(self, name, fullname=None):
        # the full path defaults to the logical name itself
        resource.__init__(self, name, fullname or name, 'x')
    def asFilter(self):
        """Create a FileFilter from self."""
        return tocfilter.FileFilter([self.name])
class archiveresource(dataresource):
    """A subclass for CArchives. """
    def __init__(self, name, fullname=None):
        resource.__init__(self, name, fullname or name, 'a')
class zlibresource(dataresource):
    """A subclass for ZlibArchives. """
    def __init__(self, name, fullname=None):
        resource.__init__(self, name, fullname or name, 'z')
class dirresource(resource):
    """A subclass for a directory.
    Generally transformed to a list of files through
    contents() and filtered by file extensions or resource type.
    Note that contents() is smart enough to regard a .py and .pyc
    as the same resource. """
    # subclasses (treeresource) set this to 1 to descend into subdirs
    RECURSIVE = 0
    def __init__(self, name, fullname=None):
        resource.__init__(self, name, fullname or name, 'd')
        self._contents = None
    def contents(self, prefix=''):
        """Return the list of (typed) resources in self.name"""
        if self._contents is not None:
            return self._contents
        self._contents = []
        flist = os.listdir(self.path)
        for fnm in flist:
            try:
                bnm, ext = os.path.splitext(fnm)
                if ext == '.py' and (bnm+'.pyc' in flist or bnm+'.pyo' in flist):
                    # a compiled form exists: skip the source
                    pass
                elif ext == '.pyo' and (bnm + '.pyc' in flist):
                    # prefer .pyc over .pyo
                    pass
                else:
                    rsrc = makeresource(os.path.join(self.path, fnm))
                    if isinstance(rsrc, pkgresource):
                        # treat a nested package directory as a plain dir
                        rsrc = self.__class__(rsrc.path)
                    if self.RECURSIVE:
                        if isinstance(rsrc, moduleresource) or isinstance(rsrc, scriptresource):
                            rsrc = rsrc.asSource()
                        fnm = os.path.basename(rsrc.path)
                        rsrc.name = os.path.join(prefix, fnm)
                        if rsrc.typ == 'd':
                            rsrc.RECURSIVE = 1
                            self._contents.extend(rsrc.contents(rsrc.name))
                        else:
                            self._contents.append(rsrc)
                    else:
                        self._contents.append(rsrc)
            except ValueError, e:
                raise RuntimeError, "Can't make resource from %s\n ValueError: %s" \
                      % (os.path.join(self.path, fnm), repr(e.args))
        return self._contents
    def asFilter(self):
        """Create a DirFilter from self."""
        return tocfilter.DirFilter([self.path])
class treeresource(dirresource):
    """A subclass for a directory and subdirectories."""
    RECURSIVE = 1  # descend into subdirectories
    def __init__(self, name, fullname=None):
        dirresource.__init__(self, name, fullname)
class pkgresource(pythonresource):
    """A Python package.
    Note that contents() can be fooled by fancy __path__ statements. """
    def __init__(self, nm, fullname):
        resource.__init__(self, nm, fullname, 'p')
        self._contents = None
        self._depends = None
    def contents(self, parent=None):
        """Return a list of subpackages and modules in self."""
        if self._contents is not None:
            return self._contents
        if parent is None:
            parent = self.name
        self._contents = []
        # walk the package directory recursively via a treeresource
        cheat = treeresource(self.path)
        for rsrc in cheat.contents():
            if os.path.splitext(rsrc.path)[1] == '.py':
                # translate the relative file path into a dotted module name
                rsrc = moduleresource(string.replace(rsrc.name[:-3], os.sep, '.'),
                                      rsrc.path)
                if rsrc.name[-8:] == '__init__':
                    # strip the trailing '.__init__' (and its dot)
                    rsrc.name = rsrc.name[:-9]
            elif os.path.isdir(rsrc.path):
                rsrc = makeresource(rsrc.path)
            else:
                continue
            # qualify with the enclosing package name
            if rsrc.name:
                rsrc.name = parent + '.' + rsrc.name
            else:
                rsrc.name = parent
            if rsrc.typ == 'm':
                self._contents.append(rsrc)
            elif rsrc.typ == 'p':
                self._contents.extend(rsrc.contents(rsrc.name))
        return self._contents
    def dependencies(self):
        """Return the list of accumulated dependencies of all modules in self."""
        if self._depends is not None:
            return self._depends
        self._depends = []
        tmp = {}
        for rsrc in self.contents():
            for r in rsrc.dependencies():
                tmp[r.name] = r
        self._depends = tmp.values()
        return self._depends
    def asFilter(self):
        """Create a PkgFilter from self."""
        return tocfilter.PkgFilter([os.path.dirname(self.path)])
if __name__ == '__main__':
    # Quick smoke test: analyze finder.py's own dependency sets.
    s = scriptresource('finder.py', './finder.py')
    print "s.modules:", s.modules
    print "s.binaries:", s.binaries

View File

@@ -1,131 +0,0 @@
import os
import finder
import re
import sys
def makefilter(name, xtrapath=None):
    """Factory: identify *name* via finder and return a matching filter.
    Scripts/modules filter by module name, packages and directories by
    path, binaries by bare file name, everything else by full path."""
    typ, nm, fullname = finder.identify(name, xtrapath)
    if typ in (finder.SCRIPT, finder.GSCRIPT, finder.MODULE):
        return ModFilter([os.path.splitext(nm)[0]])
    if typ == finder.PACKAGE:
        return PkgFilter([fullname])
    if typ == finder.DIRECTORY:
        return DirFilter([fullname])
    if typ in (finder.BINARY, finder.PBINARY):
        return FileFilter([nm])
    return FileFilter([fullname])
class _Filter:
    """Abstract base for all filters.  Subclasses populate self.elements
    and implement matches(res); a true result excludes the resource
    (see lTOC.filter, which Nones-out matching entries)."""
    def __repr__(self):
        return '<'+self.__class__.__name__+' '+repr(self.elements)+'>'
class _NameFilter(_Filter):
    """ A filter mixin that matches (exactly) on name """
    def matches(self, res):
        if res.name in self.elements:
            return self.elements[res.name]
        return 0
class _PathFilter(_Filter):
    """ A filter mixin that matches if the resource is below any of the paths"""
    def matches(self, res):
        # Climb from the resource's location toward the root, stopping
        # once the path is too short to be a meaningful prefix.
        current = os.path.normcase(os.path.abspath(res.path))
        while len(current) > 3:
            current = os.path.dirname(current)
            if self.elements.get(current, 0):
                return 1
        return 0
class _ExtFilter(_Filter):
    """ A filter mixin that matches based on file extensions (either way) """
    include = 0
    def matches(self, res):
        hit = self.elements.get(os.path.splitext(res.path)[1], 0)
        # include-mode inverts the sense of the match
        if self.include:
            return not hit
        return hit
class _TypeFilter(_Filter):
    """ A filter mixin that matches on resource type (either way) """
    include = 0
    def matches(self, res):
        hit = self.elements.get(res.typ, 0)
        # include-mode inverts the sense of the match
        if self.include:
            return not hit
        return hit
class _PatternFilter(_Filter):
    """ A filter that matches if re.search succeeds on the resource path """
    def matches(self, res):
        # elements holds compiled regex objects; first hit wins
        for pattern in self.elements:
            if pattern.search(res.path):
                return 1
        return 0
class ExtFilter(_ExtFilter):
    """ A file extension filter.
    ExtFilter(extlist, include=0)
    where extlist is a list of file extensions """
    def __init__(self, extlist, include=0):
        self.elements = {}
        for ext in extlist:
            # normalize to a leading dot so 'py' and '.py' are equivalent
            if not ext.startswith('.'):
                ext = '.' + ext
            self.elements[ext] = 1
        self.include = include
class TypeFilter(_TypeFilter):
    """ A filter for resource types.
    TypeFilter(typlist, include=0)
    where typlist is a subset of ['a','b','d','m','p','s','x','z'] """
    def __init__(self, typlist, include=0):
        self.elements = dict.fromkeys(typlist, 1)
        self.include = include
class FileFilter(_NameFilter):
    """ A filter for data files """
    def __init__(self, filelist):
        self.elements = dict.fromkeys(filelist, 1)
class ModFilter(_NameFilter):
    """ A filter for Python modules.
    ModFilter(modlist) where modlist is eg ['macpath', 'dospath'] """
    def __init__(self, modlist):
        self.elements = dict.fromkeys(modlist, 1)
class DirFilter(_PathFilter):
    """ A filter based on directories.
    DirFilter(dirlist)
    dirs may be relative and will be normalized.
    Subdirectories of dirs will be excluded. """
    def __init__(self, dirlist):
        normalized = [os.path.normcase(os.path.abspath(p)) for p in dirlist]
        self.elements = dict.fromkeys(normalized, 1)
class PkgFilter(_PathFilter):
    """At this time, identical to a DirFilter (being lazy) """
    def __init__(self, pkglist):
        # warning - pkgs are expected to be full directories
        normalized = [os.path.normcase(os.path.abspath(p)) for p in pkglist]
        self.elements = dict.fromkeys(normalized, 1)
class StdLibFilter(_PathFilter):
    """ A filter that excludes anything found in the standard library """
    def __init__(self):
        stdlib = os.path.normcase(os.path.join(sys.exec_prefix, 'lib'))
        self.elements = {stdlib: 1}
class PatternFilter(_PatternFilter):
    """ A filter that excludes if any pattern is found in resource's path """
    def __init__(self, patterns):
        # pre-compile once; matches() reuses the compiled objects
        self.elements = [re.compile(pat) for pat in patterns]

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
from direct.showbase import ElementTree as ET
import xml.etree.ElementTree as ET
class HTMLTree(ET.ElementTree):
def __init__(self, title):

View File

@@ -51,7 +51,6 @@ import traceback
import __builtin__
from StringIO import StringIO
import marshal
import ElementTree as ET
import BpDb
import unicodedata
import bisect

View File

@@ -1,71 +0,0 @@
import direct
from pandac.PandaModules import loadPrcFileData
from direct.showbase.DirectObject import DirectObject
from direct.directbase.DirectStart import *
from pandac.PandaModules import *
import direct.gui.DirectGuiGlobals as DGG
from direct.gui.DirectGui import *
from direct.task import Task
from direct.directnotify import DirectNotifyGlobal
import math
from operator import *
import ModelScreenShotGlobals
class ModelScreenShot(DirectObject):
    """Loads every model listed in ModelScreenShotGlobals and captures one
    screenshot per model, framing each with the camera.

    NOTE(review): the original indentation was lost; getDist/getOptCamPos/
    generatePics are reconstructed here as closures inside __init__
    (generatePics reads `self` from the enclosing scope and takes no
    arguments, and is invoked at the end of __init__) -- confirm against
    the original file.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory("ModelScreenShot")
    def __init__(self):
        # Grab a list of models to capture screenshots of from an array in
        # the globals file
        self.modelsToView = ModelScreenShotGlobals.models
        self.models = []
        # Attach all the models listed to render and save a pointer to them
        # in an array. Then hide the model.
        for model in self.modelsToView:
            m = loader.loadModel(model)
            m.reparentTo(render)
            self.models.append(m)
            m.hide()
        # Set a nice farplane far, far away
        self.lens = base.camera.getChild(0).node().getLens()
        self.lens.setFar(10000)
        # Hide the cursor
        self.props = WindowProperties()
        self.props.setCursorHidden(0)
        base.win.requestProperties(self.props)
        # Method for getting the distance to an object from the camera
        def getDist(obj, lens):
            rad = obj.getBounds().getRadius()
            fov = lens.getFov()
            # back off far enough that the bounding sphere fits the
            # narrower half-FOV
            dist = rad / math.tan(deg2Rad(min(fov[0], fov[1]/2.0)))
            return dist
        # Determin the optimal camera position
        def getOptCamPos(obj, dist):
            cen = obj.getBounds().getCenter()
            camPos = VBase3(cen.getX(), -dist, cen.getZ())
            return camPos
        # Generate screenshots
        def generatePics():
            # show, frame, snap and re-hide each model in turn
            for model in self.models:
                model.show()
                base.camera.setPos(getOptCamPos(model, getDist(model, self.lens)))
                uFilename = model.getName().replace('.egg','.jpg')
                self.notify.info("screenshot %s camera pos: %s" % (uFilename, base.camera.getPos()))
                base.graphicsEngine.renderFrame()
                base.screenshot(namePrefix = uFilename, defaultFilename = 0)
                model.hide()
        generatePics()
# Instantiate (which captures the screenshots) and enter the main loop.
mss = ModelScreenShot()
run()

View File

@@ -1,6 +0,0 @@
# Replace these with the models you want to screenshot
# (Panda3D model paths, loaded elsewhere via loader.loadModel)
models = [
    'models/misc/smiley',
    'models/misc/sphere',
    'models/misc/xyzAxis'
]