@BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", editor = "Constance F. Citro and Michael L. Cohen", title = "The Bicentennial Census: New Directions for Methodology in 1990: 30th Anniversary Edition", isbn = "978-0-309-37297-8", abstract = "In 1982 the Census Bureau requested the Committee on National Statistics to establish a panel to suggest research and experiments, to recommend improved methods, and to guide the Census Bureau on technical problems in appraising contending methods with regard to the conduct of the decennial census. In response, the panel produced an interim report that focused on recommendations for improvements in census methodology that warranted early investigation and testing. This report updates and expands the ideas and conclusions about decennial census methodology. \n", url = "https://nap.nationalacademies.org/catalog/21728/the-bicentennial-census-new-directions-for-methodology-in-1990-30th", year = 2015, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Research Council", title = "Developing a 21st Century Global Library for Mathematics Research", isbn = "978-0-309-29848-3", abstract = "Like most areas of scholarship, mathematics is a cumulative discipline: new research is reliant on well-organized and well-curated literature. Because of the precise definitions and structures within mathematics, today's information technologies and machine learning tools provide an opportunity to further organize and enhance discoverability of the mathematics literature in new ways, with the potential to significantly facilitate mathematics research and learning. Opportunities exist to enhance discoverability directly via new technologies and also by using technology to capture important interactions between mathematicians and the literature for later sharing and reuse.\nDeveloping a 21st Century Global Library for Mathematics Research discusses how information about what the mathematical literature contains can be formalized and made easier to express, encode, and explore. Many of the tools necessary to make this information system a reality will require much more than indexing and will instead depend on community input paired with machine learning, where mathematicians' expertise can fill the gaps of automatization. This report proposes the establishment of an organization; the development of a set of platforms, tools, and services; the deployment of an ongoing applied research program to complement the development work; and the mobilization and coordination of the mathematical community to take the first steps toward these capabilities. The report recommends building on the extensive work done by many dedicated individuals under the rubric of the World Digital Mathematical Library, as well as many other community initiatives. Developing a 21st Century Global Library for Mathematics envisions a combination of machine learning methods and community-based editorial effort that makes a significantly greater portion of the information and knowledge in the global mathematical corpus available to researchers as linked open data through a central organizational entity-referred to in the report as the Digital Mathematics Library. This report describes how such a library might operate - discussing development and research needs, role in facilitating discover and interaction, and establishing partnerships with publishers. 
", url = "https://nap.nationalacademies.org/catalog/18619/developing-a-21st-century-global-library-for-mathematics-research", year = 2014, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP title = "Record Linkage Techniques -- 1997: Proceedings of an International Workshop and Exposition", url = "https://nap.nationalacademies.org/catalog/6491/record-linkage-techniques-1997-proceedings-of-an-international-workshop-and", year = 1999, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", editor = "Nancy J. Kirkendall", title = "Using Models to Estimate Hog and Pig Inventories: Proceedings of a Workshop", isbn = "978-0-309-49572-1", abstract = "In 2014, the National Agricultural Statistics Service (NASS) engaged the National Academies of Sciences, Engineering, and Medicine to convene a planning committee to organize a public workshop for an expert open discussion of their then-current livestock models. The models had worked well for some time. Unfortunately beginning in 2013, an epidemic that killed baby pigs broke out in the United States. The epidemic was not fully realized until 2014 and spread to many states. The result was a decline in hog inventories and pork production that was not predicted by the models. NASS delayed the workshop until 2019 while it worked to develop models that could help in times both of equilibrium and shock (disease or disaster), as well as alternative approaches to help detect the onset of a shock. The May 15, 2019, workshop was consistent with NASS\u2019s 2014 intention, but with a focus on a model that can help predict hog inventories over time, including during times of shock. This publication summarizes the presentations and discussions from the workshop.", url = "https://nap.nationalacademies.org/catalog/25526/using-models-to-estimate-hog-and-pig-inventories-proceedings-of", year = 2019, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", editor = "Michelle Schwalbe", title = "Statistical Challenges in Assessing and Fostering the Reproducibility of Scientific Results: Summary of a Workshop", isbn = "978-0-309-39202-0", abstract = "Questions about the reproducibility of scientific research have been raised in numerous settings and have gained visibility through several high-profile journal and popular press articles. Quantitative issues contributing to reproducibility challenges have been considered (including improper data measurement and analysis, inadequate statistical expertise, and incomplete data, among others), but there is no clear consensus on how best to approach or to minimize these problems. \n\nA lack of reproducibility of scientific results has created some distrust in scientific findings among the general public, scientists, funding agencies, and industries. While studies fail for a variety of reasons, many factors contribute to the lack of perfect reproducibility, including insufficient training in experimental design, misaligned incentives for publication and the implications for university tenure, intentional manipulation, poor data management and analysis, and inadequate instances of statistical inference. 
  The workshop summarized in this report was designed not to address the social and experimental challenges but instead to focus on the latter issues of improper data management and analysis, inadequate statistical expertise, incomplete data, and difficulties applying sound statistical inference to the available data. Many efforts have emerged over recent years to draw attention to and improve reproducibility of scientific work. This report uniquely focuses on the statistical perspective of three issues: the extent of reproducibility, the causes of reproducibility failures, and the potential remedies for these failures.",
  url = "https://nap.nationalacademies.org/catalog/21915/statistical-challenges-in-assessing-and-fostering-the-reproducibility-of-scientific-results",
  year = 2016,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP2026,
  author = "National Research Council",
  title = "Probability and Algorithms",
  isbn = "978-0-309-04776-0",
  abstract = "Some of the hardest computational problems have been successfully attacked through the use of probabilistic algorithms, which have an element of randomness to them. Concepts from the field of probability are also increasingly useful in analyzing the performance of algorithms, broadening our understanding beyond that provided by the worst-case or average-case analyses. This book surveys both of these emerging areas on the interface of the mathematical sciences and computer science. It is designed to attract new researchers to this area and provide them with enough background to begin explorations of their own.",
  url = "https://nap.nationalacademies.org/catalog/2026/probability-and-algorithms",
  year = 1992,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP24893,
  author = "National Academies of Sciences, Engineering, and Medicine",
  editor = "Robert M. Groves and Brian A. Harris-Kojetin",
  title = "Federal Statistics, Multiple Data Sources, and Privacy Protection: Next Steps",
  isbn = "978-0-309-46537-3",
  abstract = "The environment for obtaining information and providing statistical data for policy makers and the public has changed significantly in the past decade, raising questions about the fundamental survey paradigm that underlies federal statistics. New data sources provide opportunities to develop a new paradigm that can improve timeliness, geographic or subpopulation detail, and statistical efficiency. It also has the potential to reduce the costs of producing federal statistics. The panel's first report described federal statistical agencies' current paradigm, which relies heavily on sample surveys for producing national statistics, and challenges agencies are facing; the legal frameworks and mechanisms for protecting the privacy and confidentiality of statistical data and for providing researchers access to data, and challenges to those frameworks and mechanisms; and statistical agencies' access to alternative sources of data. The panel recommended a new approach for federal statistical programs that would combine diverse data sources from government and private sector sources and the creation of a new entity that would provide the foundational elements needed for this new approach, including legal authority to access data and protect privacy. This second of the panel's two reports builds on the analysis, conclusions, and recommendations in the first one.
  This report assesses alternative methods for implementing a new approach that would combine diverse data sources from government and private sector sources, including describing statistical models for combining data from multiple sources; examining statistical and computer science approaches that foster privacy protections; evaluating frameworks for assessing the quality and utility of alternative data sources; and various models for implementing the recommended new entity. Together, the two reports offer ideas and recommendations to help federal statistical agencies examine and evaluate data from alternative sources and then combine them as appropriate to provide the country with more timely, actionable, and useful information for policy makers, businesses, and individuals.",
  url = "https://nap.nationalacademies.org/catalog/24893/federal-statistics-multiple-data-sources-and-privacy-protection-next-steps",
  year = 2017,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP11727,
  author = "National Research Council",
  editor = "Daniel L. Cork and Paul R. Voss",
  title = "Once, Only Once, and in the Right Place: Residence Rules in the Decennial Census",
  isbn = "978-0-309-10299-5",
  abstract = "The usefulness of the U.S. decennial census depends critically on the accuracy with which individual people are counted in specific housing units, at precise geographic locations. The 2000 and other recent censuses have relied on a set of residence rules to craft instructions on the census questionnaire in order to guide respondents to identify their correct ``usual residence.'' Determining the proper place to count such groups as college students, prisoners, and military personnel has always been complicated and controversial; major societal trends such as placement of children in shared custody arrangements and the prevalence of ``snowbird'' and ``sunbird'' populations who regularly move to favorable climates further make it difficult to specify ties to one household and one place. Once, Only Once, and in the Right Place reviews the evolution of current residence rules and the way residence concepts are presented to respondents. It proposes major changes to the basic approach of collecting residence information and suggests a program of research to improve the 2010 and future censuses.",
  url = "https://nap.nationalacademies.org/catalog/11727/once-only-once-and-in-the-right-place-residence-rules",
  year = 2006,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP9655,
  author = "National Research Council",
  editor = "Michael L. Cohen and Duane L. Steffey and John E. Rolph",
  title = "Statistics, Testing, and Defense Acquisition: Background Papers",
  isbn = "978-0-309-06627-3",
  abstract = "The Panel on Statistical Methods for Testing and Evaluating Defense Systems had a broad mandate---to examine the use of statistics in conjunction with defense testing. This involved examining methods for software testing, reliability test planning and estimation, validation of modeling and simulation, and use of modern techniques for experimental design. Given the breadth of these areas, including the great variety of applications and special issues that arise, making a contribution in each of these areas required that the Panel's work and recommendations be at a relatively general level.
  However, a variety of more specific research issues were either brought to the Panel's attention by members of the test and acquisition community, e.g., what was referred to as Dubin's challenge (addressed in the Panel's interim report), or were identified by members of the panel. In many of these cases the panel thought that a more in-depth analysis or a more detailed application of suggestions or recommendations made by the Panel would either be useful as input to its deliberations or could be used to help communicate more individual views of members of the Panel to the defense test community. This resulted in several research efforts. Given various criteria, especially immediate relevance to the test and acquisition community, the Panel has decided to make available three technical or background papers, each authored by a Panel member jointly with a colleague. These papers are individual contributions and are not a consensus product of the Panel; however, the Panel has drawn from these papers in preparation of its final report: Statistics, Testing, and Defense Acquisition. The Panel has found each of these papers to be extremely useful and they are strongly recommended to readers of the Panel's final report.",
  url = "https://nap.nationalacademies.org/catalog/9655/statistics-testing-and-defense-acquisition-background-papers",
  year = 1999,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP10907,
  author = "National Research Council",
  editor = "Constance F. Citro and Daniel L. Cork and Janet L. Norwood",
  title = "The 2000 Census: Counting Under Adversity",
  isbn = "978-0-309-09141-1",
  abstract = "The decennial census was the federal government's largest and most complex peacetime operation. This report of a panel of the National Research Council's Committee on National Statistics comprehensively reviews the conduct of the 2000 census and the quality of the resulting data. The panel's findings cover the planning process for 2000, which was marked by an atmosphere of intense controversy about the proposed role of statistical techniques in the census enumeration and possible adjustment for errors in counting the population. The report addresses the success and problems of major innovations in census operations, the completeness of population coverage in 2000, and the quality of both the basic demographic data collected from all census respondents and the detailed socioeconomic data collected from the census long-form sample (about one-sixth of the population). The panel draws comparisons with the 1990 experience and recommends improvements in the planning process and design for 2010. The 2000 Census: Counting Under Adversity will be an invaluable resource for users of the 2000 data and for policymakers and census planners. It provides a trove of information about the issues that have fueled debate about the census process and about the operations and quality of the nation's twenty-second decennial enumeration.",
  url = "https://nap.nationalacademies.org/catalog/10907/the-2000-census-counting-under-adversity",
  year = 2004,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP11901,
  author = "National Research Council",
  editor = "Constance F. Citro and Graham Kalton",
  title = "Using the American Community Survey: Benefits and Challenges",
  isbn = "978-0-309-10672-6",
  abstract = "The American Community Survey (ACS) is a major new initiative from the U.S.
  Census Bureau designed to provide continuously updated information on the numbers and characteristics of the nation's people and housing. It replaces the ``long form'' of the decennial census. Using the American Community Survey covers the basics of how the ACS design and operations differ from the long-form sample; using the ACS for such applications as formula allocation of federal and state funds, transportation planning, and public information; and challenges in working with ACS estimates that cover periods of 12, 36, or 60 months depending on the population size of an area. This book also recommends priority areas for continued research and development by the U.S. Census Bureau to guide the evolution of the ACS, and provides detailed, comprehensive analysis and guidance for users in federal, state, and local government agencies, academia, and media.",
  url = "https://nap.nationalacademies.org/catalog/11901/using-the-american-community-survey-benefits-and-challenges",
  year = 2007,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP11540,
  author = "John Derbyshire",
  title = "Unknown Quantity: A Real and Imaginary History of Algebra",
  isbn = "978-0-309-09657-7",
  abstract = "Prime Obsession taught us not to be afraid to put the math in a math book. Unknown Quantity heeds the lesson well. So grab your graphing calculators, slip out the slide rules, and buckle up! John Derbyshire is introducing us to algebra through the ages -- and it promises to be just what his die-hard fans have been waiting for. ``Here is the story of algebra.'' With this deceptively simple introduction, we begin our journey. Flanked by formulae, shadowed by roots and radicals, escorted by an expert who navigates unerringly on our behalf, we are guaranteed safe passage through even the most treacherous mathematical terrain. Our first encounter with algebraic arithmetic takes us back 38 centuries to the time of Abraham and Isaac, Jacob and Joseph, Ur and Haran, Sodom and Gomorrah. Moving deftly from Abel's proof to the higher levels of abstraction developed by Galois, we are eventually introduced to what algebraists have been focusing on during the last century. As we travel through the ages, it becomes apparent that the invention of algebra was more than the start of a specific discipline of mathematics -- it was also the birth of a new way of thinking that clarified both basic numeric concepts and our perception of the world around us. Algebraists broke new ground when they discarded the simple search for solutions to equations and concentrated instead on abstract groups. This dramatic shift in thinking revolutionized mathematics. Written for those among us who are unencumbered by a fear of formulae, Unknown Quantity delivers on its promise to present a history of algebra.
  Astonishing in its bold presentation of the math and graced with narrative authority, our journey through the world of algebra is at once intellectually satisfying and pleasantly challenging.",
  url = "https://nap.nationalacademies.org/catalog/11540/unknown-quantity-a-real-and-imaginary-history-of-algebra",
  year = 2006,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP21848,
  author = "National Academies of Sciences, Engineering, and Medicine",
  title = "Affordability of National Flood Insurance Program Premiums: Report 2",
  isbn = "978-0-309-38077-5",
  abstract = "When Congress authorized the National Flood Insurance Program (NFIP) in 1968, it intended for the program to encourage community initiatives in flood risk management, charge insurance premiums consistent with actuarial pricing principles, and encourage the purchase of flood insurance by owners of flood prone properties, in part, by offering affordable premiums. The NFIP has been reauthorized many times since 1968, most recently with the Biggert-Waters Flood Insurance Reform Act of 2012 (BW 2012). In this most recent reauthorization, Congress placed a particular emphasis on setting flood insurance premiums following actuarial pricing principles, which was motivated by a desire to ensure future revenues were adequate to pay claims and administrative expenses. BW 2012 was designed to move the NFIP towards risk-based premiums for all flood insurance policies. The result was to be increased premiums for some policyholders that had been paying less than NFIP risk-based premiums and to possibly increase premiums for all policyholders. Recognition of this possibility and concern for the affordability of flood insurance is reflected in sections of the Homeowner Flood Insurance Affordability Act of 2014 (HFIAA 2014). These sections called on FEMA to propose a draft affordability framework for the NFIP after completing an analysis of the efforts of possible programs for offering ``means-tested assistance'' to policyholders for whom higher rates may not be affordable. BW 2012 and HFIAA 2014 mandated that FEMA conduct a study, in cooperation with the National Academies of Sciences, Engineering, and Medicine, which would compare the costs of a program of risk-based rates and means-tested assistance to the current system of subsidized flood insurance rates and federally funded disaster relief for people without coverage. Production of two reports was agreed upon to fulfill this mandate. This second report proposes alternative approaches for a national evaluation of affordability program policy options and includes lessons for the design of a national study from a proof-of-concept pilot study.",
  url = "https://nap.nationalacademies.org/catalog/21848/affordability-of-national-flood-insurance-program-premiums-report-2",
  year = 2016,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP11098,
  author = "National Research Council",
  title = "Statistical Analysis of Massive Data Streams: Proceedings of a Workshop",
  abstract = "Massive data streams, large quantities of data that arrive continuously, are becoming increasingly commonplace in many areas of science and technology. Consequently, development of analytical methods for such streams is of growing importance. To address this issue, the National Security Agency asked the NRC to hold a workshop to explore methods for analysis of streams of data so as to stimulate progress in the field. This report presents the results of that workshop.
  It includes presentations that focused on five research areas where massive data streams are present: atmospheric and meteorological data; high-energy physics; integrated data systems; network traffic; and mining commercial data streams. The goals of the report are to improve communication among researchers in the field and to increase relevant statistical science activity.",
  url = "https://nap.nationalacademies.org/catalog/11098/statistical-analysis-of-massive-data-streams-proceedings-of-a-workshop",
  year = 2004,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP12080,
  author = "National Research Council",
  editor = "Lawrence D. Brown and Michael L. Cohen and Daniel L. Cork",
  title = "Experimentation and Evaluation Plans for the 2010 Census: Interim Report",
  isbn = "978-0-309-11545-2",
  abstract = "For the past 50 years, the Census Bureau has conducted experiments and evaluations with every decennial census involving field data collection during which alternatives to current census processes are assessed for a subset of the population. An ``evaluation'' is usually a post hoc analysis of data collected as part of the decennial census processing to determine whether individual steps in the census operated as expected. The 2010 Program for Evaluations and Experiments, known as CPEX, has enormous potential to reduce costs and increase the effectiveness of the 2020 census. The panel reduced the initial list of potential research topics from 52 to 6 and identified three priority experiments for inclusion in the 2010 census to assist 2020 census planning: (1) an experiment on the use of the Internet for data collection; (2) an experiment on the use of administrative records for various census purposes; and (3) an experiment (or set of experiments) on features of the census questionnaire. The panel also made 11 recommendations to improve the efficiency and quality of data collection, including allowing use of the Internet for data submission and including one or more alternate questionnaire experiments to examine issues such as the representation of race and ethnicity.",
  url = "https://nap.nationalacademies.org/catalog/12080/experimentation-and-evaluation-plans-for-the-2010-census-interim-report",
  year = 2008,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP11941,
  author = "National Research Council",
  editor = "Robert Bell and Michael L. Cohen",
  title = "Research and Plans for Coverage Measurement in the 2010 Census: Interim Assessment",
  url = "https://nap.nationalacademies.org/catalog/11941/research-and-plans-for-coverage-measurement-in-the-2010-census",
  year = 2007,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP13360,
  author = "National Research Council",
  editor = "William F. Eddy and Krisztina Marton",
  title = "Effective Tracking of Building Energy Use: Improving the Commercial Buildings and Residential Energy Consumption Surveys",
  isbn = "978-0-309-25401-4",
  abstract = "The United States is responsible for nearly one-fifth of the world's energy consumption. Population growth and the associated growth in housing, commercial floor space, transportation, goods, and services are expected to cause a 0.7 percent annual increase in energy demand for the foreseeable future. The energy used by the commercial and residential sectors represents approximately 40 percent of the nation's total energy consumption, and the share of these two sectors is expected to increase in the future.
  The Commercial Buildings Energy Consumption Survey (CBECS) and Residential Energy Consumption Survey (RECS) are two major surveys conducted by the Energy Information Administration. The surveys are the most relevant sources of data available to researchers and policy makers on energy consumption in the commercial and residential sectors. Many of the design decisions and operational procedures for the CBECS and RECS were developed in the 1970s and 1980s, and resource limitations during much of the time since then have prevented EIA from making significant changes to the data collections. Effective Tracking of Building Energy Use makes recommendations for redesigning the surveys based on a review of evolving data user needs and an assessment of new developments in relevant survey methods.",
  url = "https://nap.nationalacademies.org/catalog/13360/effective-tracking-of-building-energy-use-improving-the-commercial-buildings",
  year = 2012,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP10959,
  author = "National Research Council",
  editor = "Daniel L. Cork and Michael L. Cohen and Benjamin F. King",
  title = "Reengineering the 2010 Census: Risks and Challenges",
  isbn = "978-0-309-09189-3",
  abstract = "At the request of the U.S. Census Bureau, the National Research Council's Committee on National Statistics established the Panel on Research on Future Census Methods to review the early planning process for the 2010 census. This new report documents the panel's strong support for the major aims of the Census Bureau's emerging plan for 2010. At the same time, it notes the considerable challenges that must be overcome if the bureau's innovations are to be successful. The panel agrees with the Census Bureau that implementation of the American Community Survey and, with it, the separation of the long form from the census process are excellent concepts. Moreover, it concurs that the critically important Master Address File and TIGER geographic systems are in dire need of comprehensive updating and that new technologies have the potential to improve the accuracy of the count. The report identifies the risks and rewards of these and other components of the Census Bureau's plan. The report emphasizes the need for the bureau to link its research and evaluation efforts much more closely to operational planning and the importance of funding for a comprehensive and rigorous testing program before 2010.",
  url = "https://nap.nationalacademies.org/catalog/10959/reengineering-the-2010-census-risks-and-challenges",
  year = 2004,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP13387,
  author = "National Research Council",
  editor = "Paul R. Voss and Krisztina Marton",
  title = "Small Populations, Large Effects: Improving the Measurement of the Group Quarters Population in the American Community Survey",
  isbn = "978-0-309-25560-8",
  abstract = "In the early 1990s, the Census Bureau proposed a program of continuous measurement as a possible alternative to the gathering of detailed social, economic, and housing data from a sample of the U.S. population as part of the decennial census.
  The American Community Survey (ACS) became a reality in 2005, and has included group quarters (GQ)---such places as correctional facilities for adults, student housing, nursing facilities, inpatient hospice facilities, and military barracks---since 2006, primarily to more closely replicate the design and data products of the census long-form sample. The decision to include group quarters in the ACS enables the Census Bureau to provide a comprehensive benchmark of the total U.S. population (not just those living in households). However, the fact that the ACS must rely on a sample of what is a small and very diverse population, combined with limited funding available for survey operations, makes the ACS GQ sampling, data collection, weighting, and estimation procedures more complex and the estimates more susceptible to problems stemming from these limitations. The concerns are magnified in small areas, particularly in terms of detrimental effects on the total population estimates produced for small areas. Small Populations, Large Effects provides an in-depth review of the statistical methodology for measuring the GQ population in the ACS. This report addresses difficulties associated with measuring the GQ population and the rationale for including GQs in the ACS. Considering user needs for ACS data as well as operational feasibility and compatibility with the treatment of the household population in the ACS, the report recommends alternatives to the survey design and other methodological features that can make the ACS more useful for users of small-area data.",
  url = "https://nap.nationalacademies.org/catalog/13387/small-populations-large-effects-improving-the-measurement-of-the-group",
  year = 2012,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}
@BOOK{NAP12524,
  author = "National Research Council",
  editor = "Robert M. Bell and Michael L. Cohen",
  title = "Coverage Measurement in the 2010 Census",
  isbn = "978-0-309-12826-1",
  abstract = "The census coverage measurement programs have historically addressed three primary objectives: (1) to inform users about the quality of the census counts; (2) to help identify sources of error to improve census taking; and (3) to provide alternative counts based on information from the coverage measurement program. In planning the 1990 and 2000 censuses, the main objective was to produce alternative counts based on the measurement of net coverage error. For the 2010 census coverage measurement program, the Census Bureau will deemphasize that goal, and is instead planning to focus on the second goal of improving census processes. This book, which details the findings of the National Research Council's Panel on Coverage Evaluation and Correlation Bias, strongly supports the Census Bureau's change in goal. However, the panel finds that the current plans for data collection, data analysis, and data products are still too oriented towards measurement of net coverage error to fully exploit this new focus. Although the Census Bureau has taken several important steps to revise data collection and analysis procedures and data products, this book recommends further steps to enhance the value of coverage measurement for the improvement of future census processes.",
  url = "https://nap.nationalacademies.org/catalog/12524/coverage-measurement-in-the-2010-census",
  year = 2009,
  publisher = "The National Academies Press",
  address = "Washington, DC"
}