polak.bib

@inproceedings{Pol76-1,
  author = {W. Polak},
  title = {Anwendung der axiomatischen {Definitionsmethode} auf h{\"o}here {Programmiersprachen}},
  booktitle = {Programmiersprachen, 4. Fachtagung der GI},
  pages = {12--18},
  year = {1976},
  editor = {H.-J. Schneider and M. Nagl},
  volume = {1},
  series = {Informatik-Fachberichte},
  address = {Berlin},
  month = mar,
  publisher = {Springer Verlag},
  url = {http://portal.acm.org/citation.cfm?id=646610.696645}
}
@article{Pol79-1,
  author = {Wolfgang Polak},
  month = sep,
  year = {1979},
  title = {An exercise in automatic program verification},
  journal = {IEEE Transactions on Software Engineering},
  volume = {SE-5},
  number = {5},
  pages = {453--458},
  abstract = {This paper describes in some detail the computer-aided 
proof of a permutation program obtained using the Stanford Pascal verifier.
We emphasize the systematic way in which a proof can be developed from an 
intuitive understanding of the program. The paper illustrates the current state of the art 
in automatic program verification.},
  url = {http://ieeexplore.ieee.org/xpl/freeabs_all.jsp?isnumber=35912&arnumber=1702654&count=17&index=3}
}
@techreport{LGHKMOPS79-2,
  author = {David C. Luckham and Steven M. German and Friedrich W. von Henke
             and Richard A. Karp and P. W. Milne and Derek C. Oppen and
             Wolfgang Polak and William L. Scherlis},
  title = {Stanford Pascal Verifier user manual},
  institution = {Stanford University, Department of Computer Science},
  type = {Technical Report},
  number = {STAN-CS-TR-79-731},
  pages = {124},
  month = mar,
  year = {1979},
  abstract = {The Stanford PASCAL verifier is an interactive program
             verification system. It automates much of the work necessary to
             analyze a program for consistency with its documentation, and to
             give a rigorous mathematical proof of such consistency or to
             pin-point areas of inconsistency. It has been shown to have
             applications as an aid to programming, and to have potential for
             development as a new and useful tool in the production of reliable
             software.}
}
@techreport{LP80-1,
  author = {David C. Luckham and Wolfgang Polak},
  title = {{Ada} exceptions: specification and proof techniques},
  institution = {Stanford University, Department of Computer Science},
  type = {Technical Report},
  number = {STAN-CS-TR-80-789},
  pages = {20},
  month = feb,
  year = {1980},
  url = {http://www.pocs.com/Papers/CS-TR-80-789.pdf},
  abstract = {A method of documenting exception propagation and handling in Ada
             programs is proposed. Exception propagation declarations are
             introduced as a new component of Ada specifications. This permits
             documentation of those exceptions that can be propagated by a
             subprogram. Exception handlers are documented by entry assertions.
             Axioms and proof rules for Ada exceptions are given. These rules
             are simple extensions of previous rules for Pascal and define an
             axiomatic semantics of Ada exceptions. As a result, Ada programs
             specified according to the method can be analysed by formal proof
             techniques for consistency with their specifications, even if they
             employ exception propagation and handling to achieve required
             results (i.e. non-error situations). Example verifications are
             given.}
}
@article{LP80-2,
  author = {David C. Luckham and W. Polak},
  title = {{Ada} exception handling: an axiomatic approach},
  journal = {ACM Transactions on Programming Languages and Systems},
  volume = {2},
  number = {2},
  pages = {225--233},
  month = apr,
  year = {1980},
  issn = {0164-0925},
  url = {http://doi.acm.org/10.1145/357094.357100},
  abstract = {A method of documenting exception propagation and handling in Ada
programs is proposed. Exception propagation declarations are
introduced as a new component of Ada specifications, permitting
documentation of those exceptions that can be propagated by a
subprogram. Exception handlers are documented by entry
assertions. Axioms and proof rules for Ada exceptions given. These
rules are simple extensions of previous rules for Pascal and define an
axiomatic semantics of Ada exceptions. As a result, Ada programs
specified according to the method can be analyzed by formal proof
techniques for consistency with their specifications, even if they
employ exception propagation and handling to achieve required results
(i.e., nonerror situations). Example verifications are given.}
}
@techreport{Pol80-3,
  author = {Wolfgang Heinz Polak},
  month = may,
  year = {1980},
  title = {Theory of compiler specification and verification},
  number = {STAN-CS-80-802},
  institution = {Stanford University},
  address = {Stanford, CA},
  abstract = {The formal specification, design, implementation, and
                  verification of a compiler for a Pascal-like
                  language is described. All components of the
                  compilation process such as scanning, parsing, type
                  checking, and code generation are considered. The
                  implemented language contains most control
                  structures of Pascal, recursive procedures and
                  functions, and jumps. It provides user defined data
                  types including arrays, records, and pointers. A
                  simple facility for input-output is provided. The
                  target language assumes a stack machine including a
                  display mechanism to handle procedure and function
                  calls. The compiler itself is written in Pascal
                  Plus, a dialect of Pascal accepted by the Stanford
                  verifier. The Stanford verifier is used to give a
                  complete formal machine checked verification of the
                  compiler. One of the main problem areas considered
                  is the formal mathematical treatment of programming
                  languages and compilers suitable as input for
                  automated program verification systems. Several
                  technical and methodological problems of
                  mechanically verifying large software systems are
                  considered. Some new verification techniques are
                  developed, notably methods to reason about pointers,
                  fixed points, and quantification. These techniques
                  are of general importance and are not limited to
                  compiler verification. The result of this research
                  demonstrates that construction of large correct
                  programs is possible with the existing verification
                  technology. It indicates that verification will
                  become a useful software engineering tool in the
                  future. Several problem areas of current
                  verification systems are pointed out and areas for
                  future research are outlined.}
}
@inproceedings{LP80-4,
  author = {David C. Luckham and Wolfgang Polak},
  title = {A Practical Method of Documenting and Verifying {Ada} Programs with
           Packages},
  booktitle = {Proceedings of the 1980 {SIGPLAN} Symposium on the {Ada} Programming Language},
  pages = {113--122},
  publisher = {ACM},
  address = {New York, NY},
  month = dec,
  year = {1980},
  url = {http://doi.acm.org/10.1145/948632.948648},
  abstract = {We present a method of formal specification of Ada
programs containing packages. The method suggests concepts and
guidelines useful for giving adequate informal documentation of
packages by means of comments. The method depends on (1) the standard
inductive assertion technique for subprograms, (2) the use of history
sequences in assertions specifying the declaration and use of
packages, and (3) the addition of three categories of specifications
to Ada package declarations: (a) visible specifications, (b) boundary
specifications, (c) internal specifications. Axioms and proof rules for
the Ada package constructs (declaration, instantiation, and function
and procedure call) are given in terms of history sequences and
package specifications. These enable us to construct formal proofs of
the correctness of Ada programs with packages. The axioms and proof
rules are easy to implement in automated program checking systems. The
use of history sequences in both informal documentation and formal
specifications and proofs is illustrated by examples.}
}
@inproceedings{Pol81-1,
  key = {Polak},
  author = {Wolfgang Polak},
  title = {Program Verification Based on Denotational Semantics},
  booktitle = {Conference Record of the Eighth ACM Symposium on Principles of
           Programming Languages},
  publisher = {ACM},
  month = jan,
  year = {1981},
  pages = {149--158},
  annote = {24 references.},
  url = {http://www.pocs.com/Papers/POPL-81.pdf},
  abstract = {A theory of partial correctness proofs is formulated in Scott's logic
of computable functions. This theory allows mechanical construction of
verification conditions solely on the basis of a denotational language
definition. Extensionally these conditions, the resulting proofs, and
the required program augmentation are similar to those of Hoare style
proofs; conventional input, output, and invariant assertions in a
first order assertion language are required. The theory applies to
almost any sequential language defined by a continuation semantics;
for example, there are no restrictions on aliasing or
side-effects. Aspects of ``static semantics'', such as type and
declaration constraints, which are expressed in the denotational
definition are validated as part of the verification condition
generation process.}
}
@inproceedings{Pol81-2,
  author = {W. Polak},
  title = {Program Verification at Stanford - Past, Present, Future},
  booktitle = {Workshop on Artificial Intelligence},
  pages = {256--276},
  year = {1981},
  editor = {J. Siekmann},
  volume = {47},
  series = {Informatik-Fachberichte},
  address = {Berlin},
  publisher = {Springer Verlag},
  url = {http://portal.acm.org/citation.cfm?id=647602.732391}
}
@book{Pol81-3,
  author = {W. Polak},
  title = {Compiler Specification and Verification},
  publisher = {Springer},
  address = {Berlin},
  year = {1981},
  isbn = {3-540-10886-6},
  volume = {124},
  series = {Lecture Notes in Computer Science},
  url = {http://www.amazon.com/gp/product/0387108866/104-5132917-1174311?n=283155}
}
@inproceedings{Pol86-1,
  author = {Wolfgang Polak},
  title = {Framework for a Knowledge-Based Programming Environment},
  booktitle = {Int'l Workshop on Programming Environments},
  year = {1986},
  volume = {244},
  series = {Lecture Notes in Computer Science},
  address = {Trondheim, Norway},
  month = jun,
  pages = {566--574},
  publisher = {Springer Verlag},
  abstract = {We develop a formal model describing the basic objects
                  and operations of the software development process.
                  This model allows us to characterize functions
                  performed by support environments and to formally
                  define the semantics of environment tools.
                  Representing information about available tools,
                  administrative procedures, and constraints in a
                  knowledge base allows automated support for many
                  software development and life-cycle activities.
                  Constraint maintenance within a knowledge base takes
                  over many routine administrative tasks; program
                  synthesis techniques can be used to generate complex
                  plans.}
}
@unpublished{Pol86-2,
  author = {Wolfgang Polak},
  title = {{AI} Approaches to Software Engineering},
  note = {Workshop on Complementary Approaches to Software Engineering, Imperial College, London},
  month = jun,
  year = {1986}
}
@inproceedings{Pol86-3,
  author = {Wolfgang Polak},
  title = {Maintainability and Reusable Program Designs},
  booktitle = {Conf.~on Software Reusability and Maintainability},
  year = {1986},
  address = {Tysons Corner, VA},
  month = sep,
  abstract = {Problems and shortcomings of the current maintenance
                  paradigm and the approach to software reuse are
                  analyzed.  We argue that knowledge-based
                  transformation techniques will provide solutions in
                  both areas.  Such techniques will enable maintenance
                  to be performed on the specification level.
                  Efficient code can be regenerated from changed
                  specifications by replaying recorded design
                  decisions.  Program transformations can be used to
                  capture and represent general as well as application
                  specific programming knowledge.  Reuse of such
                  knowledge is the most flexible and general form of
                  reuse.}
}
@inproceedings{GCPJ86-4,
  author = {Allen Goldberg and Cordell Green and Wolfgang Polak and Richard
           Jullig},
  title = {Iteration in the software process},
  booktitle = {Proceedings of the 3rd International Software Process
           Workshop},
  year = {1986},
  editor = {M. Dowson},
  pages = {105--108},
  month = nov,
  abstract = {In this position paper we discuss the model of software
                  development adopted by Kestrel Institute, how
                  iteration relates to this model, what
                  knowledge-based tools have been developed that
                  support iteration and specific, ongoing research on
                  automating formal program developments.},
  url = {http://www.amazon.com/Iteration-Software-Process-Proceedings-International/dp/0818607092/ref=sr_1_126/104-6783173-7179944?ie=UTF8&s=books&qid=1176587271&sr=1-126}
}
@techreport{Pol87-1,
  author = {Wolfgang Polak},
  title = {Framework for a Knowledge-Based Programming Environment},
  institution = {Kestrel Institute},
  year = {1987},
  number = {KES.U.86.3},
  month = jun,
  abstract = {A formal model for describing the semantics of
                  operations in software engineering environments is
                  presented.  The proposed model consists of two
                  parts, a description of the data domains, and an
                  axiomatic definition of tool invocations and their
                  composition. Objects in environments are
                  characterized by their hierarchical composition and by
                  dependencies between their sub-components.  A
                  general data model is developed, allowing uniform
                  description of such diverse domains as source
                  programs, requirements, and documentation.  A
                  first-order assertion language is introduced.  The
                  semantics of tools are described with pre and post
                  conditions in this assertion language.  Further,
                  environment goals, such as ``release a system x for
                  a target y with property z'' can be stated formally.
                  An agenda is a partially ordered set of tool
                  invocations and user actions.  A language for
                  describing agendas is defined. A formal semantics
                  for this language is given by set of axiomatic proof
                  rules.  Program synthesis and planning techniques
                  can be used to generate an optimal agenda such that
                  its precondition is satisfied in the current state
                  and its post condition implies a given goal
                  statement.  Combined with a knowledge base this
                  mechanism can serve as the basis for the development
                  of sophisticated environments that provide automated
                  support for many software development and life-cycle
                  activities.}
}
@techreport{JLGP87-2,
  author = {Richard Jullig and Peter Ladkin and Li-Mei Gilham and Wolfgang Polak},
  title = {{KBSA} Project Management Assistant Final Technical Report (2 vols)},
  institution = {Rome Air Development Center},
  year = {1987},
  number = {RADC-TR-87-78},
  month = jul
}
@techreport{JPLG87-3,
  author = {R. Jullig and W. Polak and P. Ladkin and L. Gilham},
  title = {Knowledge-Based Software Project Management},
  institution = {Kestrel Institute},
  number = {KES.U.87.3},
  year = {1987}
}
@techreport{Pol88-1,
  author = {W. Polak},
  title = {A Technique for Defining Predicate Transformers},
  institution = {Odyssey Research Associates},
  address = {Ithaca, NY},
  number = {17-4},
  month = oct,
  year = {1988}
}
@techreport{Pol89-1,
  author = {W. Polak},
  title = {Predicate transformer semantics for {Ada}},
  institution = {Odyssey Research Associates},
  number = {89-39},
  month = sep,
  year = {1989}
}
@article{GMP90-1,
  author = {David Guaspari and Carla Marceau and Wolfgang Polak},
  title = {Formal Verification of {Ada} Programs},
  journal = {IEEE Transactions on Software Engineering},
  volume = {16},
  number = {9},
  month = sep,
  year = {1990},
  pages = {1058--1075},
  annote = {Penelope verification editor and Larch/Ada. 35 references.},
  url = {http://csdl.computer.org/comp/trans/ts/1990/09/e1058abs.htm},
  abstract = {The Penelope verification editor and its formal basis are
                  described. Penelope is a prototype system for the
                  interactive development and verification of programs
                  that are written in a rich subset of sequential
                  Ada. Because it generates verification conditions
                  incrementally, Penelope can be used to develop a
                  program and its correctness proof in concert. If an
                  already-verified program is modified, one can
                  attempt to prove the modified version by replaying
                  and modifying the original sequence of proof
                  steps. Verification conditions are generated by
                  predicate transformers whose logical soundness can
                  be proven by establishing a precise formal
                  connection between predicate transformation and
                  denotational definitions in the style of
                  continuation semantics. Penelope's specification
                  language, Larch/Ada, belongs to the family of Larch
                  interface languages. It scales up properly, in the
                  sense that one can demonstrate the soundness of
                  decomposing an implementation hierarchically and
                  reasoning locally about the implementation of each
                  node in the hierarchy.}
}
@misc{GP91,
  author = {H. Graves and W. Polak},
  title = {Common Intermediate Design Language Overview},
  howpublished = {Lockheed Palo Alto Research Laboratory},
  year = {1991}
}
@incollection{GMP92-1,
  author = {David Guaspari and Carla Marceau and Wolfgang Polak},
  title = {Formal Verification of {Ada} Programs},
  editor = {Ursula Martin and Jeannette M. Wing},
  booktitle = {First International Workshop on Larch, Dedham 1992},
  publisher = {Springer-Verlag},
  year = {1992},
  pages = {104--141},
  annote = {35 references}
}
@inproceedings{GP92-2,
  author = {W.H. Graves and W. Polak},
  title = {Common Intermediate Design Language},
  booktitle = {Hawaii International Conference on System Sciences},
  year = {1992},
  month = jan,
  url = {http://ieeexplore.ieee.org/iel2/378/4717/00183264.pdf?isnumber=&arnumber=183264},
  abstract = {The Common Intermediate Design Language (CIDL) is a high-level
                  executable system design language intended for the
                  evolutionary prototyping of large distributed
                  software systems.  CIDL was designed to be the
                  target language for code synthesis from high-level
                  system descriptions and the source language for
                  translation in to Ada.  The resulting design is a
                  typed language with higher-order functions,
                  polymorphism, and concurrency constructs.  The
                  language uses types as a representation of formal
                  specifications.}
}
@inproceedings{MP92-3,
  author = {Carla Marceau and Wolfgang Polak},
  title = {Modular Verification of {Ada} Library Units},
  booktitle = {Seventh Annual Conference on Computer Assurance},
  year = {1992},
  address = {Gaithersburg},
  month = jun,
  url = {http://ieeexplore.ieee.org/iel2/636/6057/00235764.pdf?isnumber=&arnumber=235764},
  abstract = {Modular verification of Ada library units enables programmers to
                  write and verify small program units and to compose
                  them with minimal additional effort into larger
                  correct programs. Penelope is a prototype
                  verification environment for Ada that supports
                  separate verification of program units and their
                  composition. The authors have extended Penelope to
                  enable verification of larger Ada programs,
                  consisting of multiple compilation units. They
                  discuss two major issues that arise from the
                  composition of program modules. The first is
                  ensuring that the composition itself is correct,
                  that is, that assumptions made by one module about
                  another indeed hold. The elaboration of Ada packages
                  poses new problems, which are described along with
                  the solution adopted. A novel technique for reducing
                  the amount of annotation required from the user is
                  described. The second issue is maintaining
                  consistency between the various modules without
                  incurring excessive overhead. The way in which a set
                  of modules is kept consistent depends on the
                  structure of the language. The method, called
                  separate verification, is closely modeled on the
                  technique of separate compilation used in Ada. How
                  Ada techniques can be adapted for a verification
                  environment is discussed}
}
@misc{GMPS95-1,
  author = {David Guaspari and John McHugh and Wolfgang Polak and Mark Saaltink},
  title = {Towards a Formal Semantics for {Ada} {9X}},
  howpublished = {NASA Contractor Report 195037},
  month = mar,
  year = {1995},
  url = {http://www.pocs.com/Papers/Ada-oct94.pdf}
}
@inproceedings{PNB95-2,
  author = {Wolfgang Polak and Lester D. Nelson and Timothy W. Bickmore},
  title = {Reengineering {IMS} Databases to Relational Systems},
  booktitle = {7th annual Software Technology Conference},
  year = {1995},
  address = {Salt Lake City},
  month = apr,
  note = {published on CDROM}
}
@techreport{RP98-1,
  author = {Eleanor Rieffel and Wolfgang Polak},
  title = {Introduction to Quantum Computing for Non-Physicists},
  institution = {FX Palo Alto Laboratory},
  year = {1998},
  number = {TR-98-044},
  month = aug,
  abstract = {Richard Feynman's observation that quantum mechanical
                  effects could not be simulated efficiently on a
                  computer led to speculation that computation in
                  general could be done more efficiently if it used
                  quantum effects. This speculation appeared justified
                  when Peter Shor described a polynomial time quantum
                  algorithm for factoring integers.  In quantum
                  systems, the computational space increases
                  exponentially with the size of the system which
                  enables exponential parallelism. This parallelism
                  could lead to exponentially faster quantum
                  algorithms than possible classically. The catch is
                  that accessing the results, which requires
                  measurement, proves tricky and requires new
                  non-traditional programming techniques.  The aim of
                  this paper is to guide computer scientists and other
                  non-physicists through the conceptual and notational
                  barriers that separate quantum computing from
                  conventional computing. We introduce basic
                  principles of quantum mechanics to explain where the
                  power of quantum computers comes from and why it is
                  difficult to harness. We describe quantum
                  cryptography, teleportation, and dense
                  coding. Various approaches to harnessing the power
                  of quantum parallelism are explained, including
                  Shor's algorithm, Grover's algorithm, and Hogg's
                  algorithms. We conclude with a discussion of quantum
                  error correction.},
  url = {http://xxx.lanl.gov/abs/quant-ph/9809016}
}
@inproceedings{Pol98-2,
  author = {Wolfgang Polak},
  title = {Formal Methods in Practice},
  booktitle = {Proceedings of The 1998 ARO/ONR/NSF/DARPA Monterey Workshop on Engineering Automation for Computer Based Systems},
  year = {1998},
  number = {NPS-CS-99-00},
  address = {Monterey, California},
  organization = {Naval Postgraduate School}
}
@article{HMPR99-1,
  author = {Tad Hogg and Carlos Mochon and Wolfgang Polak and Eleanor Rieffel},
  title = {Tools for Quantum Algorithms},
  journal = {International Journal of Modern Physics C},
  year = {1999},
  volume = {10},
  number = {7},
  pages = {1347--1361},
  url = {http://arxiv.org/abs/quant-ph/9811073},
  abstract = {We present efficient implementations of a number of operations for
quantum computers. These include controlled phase adjustments of the
amplitudes in a superposition, permutations, approximations of
transformations and generalizations of the phase adjustments to block
matrix transformations. These operations generalize those used in
proposed quantum search algorithms.}
}
@article{RP00-1,
  author = {Eleanor Rieffel and Wolfgang Polak},
  title = {An Introduction to Quantum Computing for Non-Physicists},
  journal = {ACM Computing Surveys},
  year = {2000},
  number = {3},
  volume = {32},
  month = sep,
  pages = {300--335},
  url = {http://doi.acm.org/10.1145/367701.367709},
  abstract = {Richard Feynman's observation that quantum mechanical effects could
not be simulated efficiently on a computer led to speculation that
computation in general could be done more efficiently if it used
quantum effects. This speculation appeared justified when Peter Shor
described a polynomial time quantum algorithm for factoring
integers. In quantum systems, the computational space increases
exponentially with the size of the system which enables exponential
parallelism. This parallelism could lead to exponentially faster
quantum algorithms than possible classically. The catch is that
accessing the results, which requires measurement, proves tricky and
requires new non-traditional programming techniques. The aim of this
paper is to guide computer scientists and other non-physicists through
the conceptual and notational barriers that separate quantum computing
from conventional computing. We introduce basic principles of quantum
mechanics to explain where the power of quantum computers comes from
and why it is difficult to harness. We describe quantum cryptography,
teleportation, and dense coding. Various approaches to harnessing the
power of quantum parallelism are explained, including Shor's
algorithm, Grover's algorithm, and Hogg's algorithms. We conclude with
a discussion of quantum error correction.}
}
@article{RP00-2,
  author = {Eleanor Rieffel and Wolfgang Polak},
  title = {Introduction to Quantum Computing (in {R}ussian)},
  journal = {Quantum Computers and Computing},
  year = {2000},
  number = {1},
  volume = {1},
  pages = {4--57},
  note = {R\&C Dynamics, Moscow},
  url = {http://www.pocs.com/qc/qc-tut-ru.pdf}
}
@inproceedings{PGPRWB00-3,
  author = {Patrick Chiu and Andreas Girgensohn and Wolfgang Polak and Eleanor Rieffel and Lynn Wilcox and Bennett, III, Forrest H.},
  title = {A Genetic Segmentation Algorithm for Image Data Streams and Video},
  booktitle = {Genetic and Evolutionary Computation Conference},
  year = {2000},
  address = {Las Vegas, NV},
  url = {http://www.pocs.com/Papers/FXPAL-PR-00-115.pdf},
  abstract = {We describe a genetic segmentation algorithm for image data streams
and video. This algorithm operates on segments of a string
representation. It is similar to both classical genetic algorithms
that operate on bits of a string and genetic grouping algorithms that
operate on subsets of a set. It employs a segment fair crossover
operation. For evaluating segmentations, we define similarity
adjacency functions, which are extremely expensive to optimize with
traditional methods. The evolutionary nature of genetic algorithms
offers a further advantage by enabling incremental
segmentation. Applications include browsing and summarizing video and
collections of visually rich documents, plus a way of adapting to user
access patterns.}
}
@inproceedings{PGPRW00-4,
  author = {Patrick Chiu and Andreas Girgensohn and Wolfgang Polak and Eleanor Rieffel and Lynn Wilcox},
  title = {A Genetic Algorithm for Video Segmentation and Summarization},
  booktitle = {IEEE International Conference on Multimedia and Expo},
  year = {2000},
  address = {New York, NY},
  organization = {IEEE},
  url = {http://www.pocs.com/Papers/FXPAL-PR-00-114.pdf},
  abstract = {We describe a genetic segmentation algorithm for video. This
algorithm operates on segments of a string representation. It is
similar to both classical genetic algorithms that operate on bits of a
string and genetic grouping algorithms that operate on subsets of a
set. For evaluating segmentations, we define similarity adjacency
functions, which are extremely expensive to optimize with traditional
methods. The evolutionary nature of genetic algorithms offers a
further advantage by enabling incremental segmentation. Applications
include video summarization and indexing for browsing, plus adapting
to user access patterns.}
}
@misc{PoPat01-1,
  title = {Method and System for Constructing Adaptive and Resilient Software},
  author = {Wolfgang Polak},
  howpublished = {US Patent 6,226,627 B1},
  month = may,
  year = {2001},
  abstract = {A dependency action system uses redundant sets of
                  dynamically reconfigurable functional components to
                  achieve robustness and fault tolerance, and to
                  achieve self-optimization by learning and planning
                  techniques that use time-stamps and or computation
                  stamps as a key indicator. The dependency action
                  system is based on functional components, or
                  actions, which act on data values that are stored in
                  stamped storage locations. Data is read and written
                  to these storage locations, updating the stamps as
                  appropriate. The execution of an action is
                  controlled by the stamps of its enabling and
                  disabling storage locations. The dependency action
                  system specifies an action as enabled if new data
                  has arrived in the enabling storage
                  locations. Updating the stamp of the disabling
                  storage locations disables the action. If an
                  alternative action succeeds and produces a value,
                  the other alternative actions become disabled. If
                  one action fails to produce a value to a storage
                  location, other alternative actions may still be
                  enabled and can be executed. Thus, the dependency
                  action system supports automatic recovery from
                  failure of an individual action. The dependency
                  action system accumulates statistical information
                  about the behavior of the actions, which includes
                  the probability that a particular disabling storage
                  location will be updated by an action and the
                  average cost of an action. The dependency action
                  system uses this information to plan a sequence of
                  action executions that most likely leads to the
                  cheapest solution of a given task.},
  url = {http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=/netahtml/srchnum.htm&r=1&f=G&l=50&s1=6,226,627.WKU.&OS=PN/6,226,627&RS=PN/6,226,627}
}
@article{SCP02-1,
  author = {Wolfgang Polak},
  title = {Formal Methods in Practice},
  journal = {Science of Computer Programming},
  year = 2002,
  volume = 42,
  number = 1,
  pages = {75--85},
  month = jan,
  abstract = {Technology transfer from academic research to industrial practice is
hampered by social, political, and economic problems more than by
technical issues. This paper describes one instance of successful
technology transfer based on a special-purpose language and an
associated translation tool tailored to the customer's needs. The key
lesson to be learned from this example is that mathematical formalisms
must be transparent to the user. Formalisms can be effectively
employed if they are represented by tools that fit into existing work
processes. It is suggested that special-purpose, domain-specific
languages and their translators are an important vehicle to transition
advanced technology to practice. This approach enables domain experts
to solve problems using familiar terminology. It enables engineers of
all disciplines to utilize computers without becoming software
engineers. In doing so, we not only mitigate the chronic shortage of
qualified software personnel but also simplify the problem of
requirements analysis and specification. }
}
@techreport{AP04-1,
  author = {Brian Arthur and Wolfgang Polak},
  title = {The Evolution of Technology within a Simple Computer Model},
  institution = {Santa Fe Institute},
  year = {2004},
  number = {04-12-042},
  month = dec,
  url = {http://www.pocs.com/Papers/SFI-04-12-042.pdf},
  abstract = {New technologies are constructed from technologies that previously
exist, and in turn these new technologies offer themselves as possible
components---building blocks---for the construction of further new
technologies. Thus in 1912 the amplifier circuit was constructed from
the already existing triode vacuum tube in combination with other
circuit components. It in turn made possible the oscillator and the
heterodyne mixer; which together with other standard components made
possible continuous-wave radio transmission. The collective of
technology in this way forms a network of elements where novel
elements are created from existing ones and where more complicated
elements evolve from simpler ones. We model this phenomenon of
technology creating itself by setting up an artificial system in which
new elements (logic circuits) build themselves from simpler existing
elements, and we study the properties of the resulting build-out.}
}
@manual{CGLPSP04-2,
  title = {Systems and methods for creating an interactive {3D} visualization of indexed media},
  author = {Patrick Chiu and Andreas Girgensohn and Surapong Lertsithichai and Wolfgang Polak and Frank Shipman},
  organization = {US Patent, US 20080104546 A1},
  year = 2008
}
@manual{CGPRWBP04-3,
  title = {A genetic algorithm for summarizing image document streams and video key frame selection},
  author = {Patrick Chiu and Andreas Girgensohn and Wolfgang Polak and Eleanor Rieffel and Lynn Wilcox and Forrest Bennett},
  organization = {US Patent 6,819,795},
  month = nov,
  year = 2004,
  abstract = {A method, information system, and computer-readable
                  medium is provided for segmenting a plurality of
                  data, such as multimedia data, and in particular an
                  image document stream. Segment boundary points may
                  be used for retrieving and/or browsing the plurality
                  of data. Similarly, segment boundary points may be
                  used to summarize the plurality of data. Examples of
                  image document streams include video, PowerPoint
                  slides, and NoteLook pages. A genetic method having
                  a fitness or evaluation function using information
                  retrieval concepts, such as importance and
                  precedence, is used to obtain segment boundary
                  points. The genetic method is able to evaluate a
                  large amount of data in a cost effective manner. The
                  genetic method is also able to run incrementally on
                  streaming video and adapt to usage patterns by
                  considering frequently accessed images.},
  url = {http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.htm&r=1&f=G&l=50&s1=6,819,795.PN.&OS=PN/6,819,795&RS=PN/6,819,795}
}
@inproceedings{CGLPS05-1,
  author = {Patrick Chiu and Andreas Girgensohn and Surapong Lertsithichai and Wolfgang Polak and Frank Shipman},
  title = {{MediaMetro}: Browsing Multimedia Document Collections with a {3D} City Metaphor},
  booktitle = {ACM Multimedia 2005, Technical Demonstrations},
  year = {2005},
  month = nov,
  url = {http://www.pocs.com/Papers/FXPAL-PR-05-334.pdf},
  abstract = {The MediaMetro application provides an interactive 3D visualization of
multimedia document collections using a city metaphor. The directories
are mapped to city layouts using algorithms similar to treemaps. Each
multimedia document is represented by a building and visual summaries
of the different constituent media types are rendered onto the sides
of the building. From videos, Manga storyboards with keyframe images
are created and shown on the fa{\c c}ade; from slides and text, thumbnail
images are produced and subsampled for display on the building
sides. The images resemble windows on a building and can be selected
for media playback. To support more facile navigation between high
overviews and low detail views, a novel swooping technique was
developed that combines altitude and tilt changes with zeroing in on a
target. }
}
@manual{PAPat05-2,
  title = {System and Method for Automatic Design of Components in Libraries},
  author = {Wolfgang Polak and W.~Brian Arthur},
  organization = {US Patent 7,711,674 B2},
  month = may,
  year = 2007
}
@article{AP06-1,
  author = {W.~Brian Arthur and Wolfgang Polak},
  title = {The Evolution of Technology within a Simple Computer Model},
  journal = {Complexity},
  publisher = {Wiley},
  year = {2006},
  volume = 11,
  number = 5,
  pages = {23--31},
  month = may,
  url = {http://dx.doi.org/10.1002/cplx.v11:5},
  abstract = {Technology -- the collection of devices and methods available to
human society -- evolves by constructing new devices and methods
from ones that previously exist, and in turn offering these as
possible components -- building blocks -- for the construction of
further new devices and elements. The collective of technology in this
way forms a network of elements where novel elements are created from
existing ones and where more complicated elements evolve from simpler
ones. We model this evolution within a simple artificial system on the
computer. The elements in our system are logic circuits. New elements
are formed by combination from simpler existing elements (circuits),
and if a novel combination satisfies one of a set of needs, it is
retained as a building block for further combination. We study the
properties of the resulting build out. We find that our artificial
system can create complicated technologies (circuits), but only by
first creating simpler ones as building blocks. Our results mirror
Lenski et al.'s: that complex features can be created in biological
evolution only if simpler functions are first favored and act as
stepping stones. We also find evidence that the resulting collection
of technologies exists at self-organized criticality. }
}
@inproceedings{SCHBP06-2,
  author = {Xiaohua Sun and Patrick Chiu and Jeffrey Huang and Maribeth Back and Wolfgang Polak},
  title = {Implicit Brushing and Target Snapping: Data Exploration and Sense-making on Large Displays},
  booktitle = {Proceedings of AVI},
  year = {2006},
  month = may,
  publisher = {ACM Press},
  url = {http://www.pocs.com/Papers/FXPAL-PR-06-368.pdf},
  abstract = {During grouping tasks for data exploration and sense-making, the
criteria are normally not well-defined. When users are bringing
together data objects thought to be similar in some way, implicit
brushing continually detects for groups on the freeform workspace,
analyzes the groups' text content or metadata, and draws
attention to related data by displaying visual hints and
animation. This provides helpful tips for further grouping, group
meaning refinement and structure discovery. The sense-making process
is further enhanced by retrieving relevant information from a database
or network during the brushing. Closely related to implicit brushing,
target snapping provides a useful means to move a data object to one
of its related groups on a large display. Natural dynamics and smooth
animations also help to prevent distractions and allow users to
concentrate on the grouping and thinking tasks. Two different
prototype applications, note grouping for brainstorming and photo
browsing, demonstrate the general applicability of the technique.}
}
@inproceedings{LP06-3,
  author = {Cheng-Jia Lai and Wolfgang Polak},
  title = {A Collaborative Approach to Stochastic Load Balancing with Networked Queues of Autonomous Service Clusters},
  booktitle = {MobCops 2006 Workshop},
  year = {2006},
  organization = {IEEE/ACM},
  address = {Atlanta, Georgia},
  month = nov,
  abstract = {Load balancing has been an increasingly important issue for handling
computational intensive tasks in a distributed system such as in Grid
and cluster computing. In such systems, multiple server instances are
installed for handling requests from client applications, and each
request (or task) typically needs to stay in a queue before an
available server is assigned to process it. In this paper, we propose
a high-performance queueing method for implementing a shared queue for
collaborative clusters of servers. Each cluster of servers maintains a
local queue and queues of different clusters are networked to form a
unified (or shared) queue that may dispatch tasks to all available
servers. We propose a new randomized algorithm for forwarding requests
in an overcrowded local queue to a networked queue based on load
information of the local and neighboring clusters. The algorithm
achieves both load balancing and locality awareness. },
  url = {http://www.fxpal.com/publications/FXPAL-PR-06-384.pdf}
}
@manual{PoPat07-1,
  title = {A Method for Automatically Generating Provably Correct Code},
  author = {Wolfgang Polak},
  organization = {US Patent, 7,243,086},
  year = 2007,
  abstract = {A provably correct computer program can be generated
                  using genetic programming techniques. A desired
                  behavior is used to define a formal
                  specification. An initial population of programs is
                  created where each program has a meaning that can be
                  defined using a formalization technique. A fitness
                  function is applied to measure a distance between
                  the meaning of a program, i.e., its actual behavior,
                  and the specification. Any program having a zero
                  value as the measure of distance between the meaning
                  of the program and the specification is determined
                  to be provably correct. After the fitness of some or
                  all of the programs in the current generation of
                  programs has been computed, a provably correct
                  program has not yet been found in the current
                  generation, mutation and/or crossover techniques are
                  performed on at least some of the fittest
                  individuals in the current generation to create the
                  programs of a next generation.},
  url = {http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.htm&r=1&f=G&l=50&s1=7243086.PN.&OS=PN/7243086&RS=PN/7243086}
}
@inproceedings{RPRT08-1,
  author = {Volker Roth and Wolfgang Polak and Eleanor Rieffel and Thea Turner},
  title = {Simple and Effective Defense Against Evil Twin Access Points},
  booktitle = {WiSec '08: Proceedings of the first ACM conference on Wireless network security},
  pages = {220--235},
  year = {2008},
  isbn = {978-1-59593-814-5},
  location = {Alexandria, VA, USA},
  publisher = {ACM},
  address = {New York, NY, USA},
  abstract = {Wireless networking is becoming widespread in many
	      public places such as cafes. Unsuspecting users may
	      become victims of attacks based on ``evil twin''
	      access points. These rogue access points are
	      operated by criminals in an attempt to launch
	      man-in-the-middle attacks. We present a simple
	      protection mechanism against binding to an evil
	      twin. The mechanism leverages short authentication
	      string protocols for the exchange of cryptographic
	      keys. The short string verification is performed by
	      encoding the short strings as a sequence of colors,
	      rendered sequentially by the user's device and by
	      the designated access point of the cafe. The access
	      point must have a light capable of showing two
	      colors and must be mounted prominently in a position
	      where users can have confidence in its
	      authenticity. We conducted a usability study with
	      patrons in several cafes and participants found our
	      protection mechanism very usable. },
  url = {http://www.fxpal.com/publications/FXPAL-PR-08-428.pdf}
}
@manual{PoPat08-1,
  title = {Usable and Secure Portable Storage},
  author = {D.M.~Hilbert and D.A.~Billsus and J.E.~Adcock and W.~Polak and L.~Denoue and E.G.~Rieffel},
  organization = {US Patent 0114990 A1},
  month = may,
  year = 2008
}
@inproceedings{CHBDDPS2008,
  title = {{mTable}: browsing photos and videos on a tabletop
		 system},
  author = {Patrick Chiu and Jeffrey Huang and Maribeth Back and
		 Nicholas Diakopoulos and John Doherty and Wolfgang
		 Polak and Xiaohua Sun},
  bibdate = {2008-11-24},
  bibsource = {DBLP,
		 http://dblp.uni-trier.de/db/conf/mm/mm2008.html#ChiuHBDDPS08},
  booktitle = {Proceedings of the 16th International Conference on
		 Multimedia 2008, Vancouver, British Columbia, Canada,
		 October 26-31, 2008},
  publisher = {ACM},
  year = {2008},
  editor = {Abdulmotaleb El-Saddik and Son Vuong and Carsten
		 Griwodz and Alberto Del Bimbo and K. Sel{\c c}uk Candan
		 and Alejandro Jaimes},
  isbn = {978-1-60558-303-7},
  pages = {1107--1108},
  url = {http://www.youtube.com/watch?v=JdLKvfDRqj0&feature=channel_page},
  abstract = {In this video demo, we present mTable, a multimedia
                  tabletop system for browsing photo and video
                  collections. We have developed a set of applications
                  for visualizing and exploring photos, a board game
                  for labeling photos, and a 3D cityscape metaphor for
                  browsing videos. The system is suitable for use in a
                  living room or office lounge, and can support
                  multiple displays by visualizing the collections on
                  the tabletop and showing full-size images and videos
                  on another flat panel display in the room.}
}
@book{EP11-1,
  author = {Eleanor Rieffel and Wolfgang Polak},
  title = {Quantum Computing, a Gentle Introduction},
  publisher = {MIT Press},
  address = {Cambridge, MA},
  year = {2011},
  isbn = {0-262-01506-4},
  url = {http://mitpress.mit.edu/catalog/item/default.asp?ttype=2&tid=12555}
}
@manual{PoPat12-1,
  title = {Annealing Algorithm for Non-Rectangular shaped Stained Glass Collages},
  author = {Kazumasa Murai and Patrick Chiu and Wolfgang Polak and Andreas Girgensohn and Qiong Liu},
  organization = {US Patent 8,144,919 B2},
  month = mar,
  year = 2012,
  abstract = {The present invention relates to a method to make effective
use of non rectangular display space for displaying a collage.
In an embodiment of the invention, a heterogeneous set of
images can be arranged to display the region of interest of the
images to avoid overlapping regions of interest.  The background
gaps between the regions of interest can be filled by
extending the regions of interest using a Voronoi technique.
This produces a stained glass effect for the collage. In an
embodiment of the present invention, the technique can be 
applied to irregular shapes including circular shapes with a
hole in the middle.  In an embodiment of the present invention,
the technique can be used to print labels for disks.},
  url = {http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&Sect2=HITOFF&d=PALL&p=1&u=%2Fnetahtml%2FPTO%2Fsrchnum.htm&r=1&f=G&l=50&s1=8,144,919.PN.&OS=PN/8,144,919&RS=PN/8,144,919}
}
@manual{PoPat13-1,
  title = {System and Method for Human Assisted Secure Information Exchange},
  author = {Volker Roth and Wolfgang Polak and Eleanor Rieffel},
  organization = {US Patent 8,429,405 B2},
  month = apr,
  year = 2013
}
@manual{PoPat14-1,
  title = {Biologically based chamber matching},
  author = {Sanjeev Kaushal and Kenji Sugishima and Sukesh Janubhai Patel and Robert Filman and Wolfgang Polak and Orion Wolfe and Jessie Burger},
  organization = {US Patent 8,723,869 B2},
  month = may,
  year = 2014
}
@manual{PoPat17-1,
  title = {Method and apparatus for autonomous tool parameter impact identification system for semiconductor manufacturing},
  author = {Sanjeev Kaushal and Sukesh Janubhai Patel and Wolfgang Polak and Aaron Archer Waterman and Orion Wolfe},
  organization = {US Patent 9746849},
  month = aug,
  year = 2017
}
@manual{PoPat19-1,
  title = {Tool failure analysis using space-distorted similarity},
  author = {Sanjeev Kaushal and Sukesh Janubhai Patel and Wolfgang Polak and Orion Wolfe},
  organization = {US Patent 10228678},
  month = mar,
  year = 2019
}
@manual{PoPat20-1,
  title = {Method and apparatus for autonomous tool parameter impact identification system for semiconductor manufacturing},
  author = {Sanjeev Kaushal and Sukesh Janubhai Patel and Wolfgang Polak and Orion Wolfe},
  organization = {US Patent 10571900},
  month = feb,
  year = 2020
}

This file was generated by bibtex2html 1.99.